[Binary artifact removed: tar archive `var/home/core/zuul-output/` containing `logs/kubelet.log.gz` (gzip-compressed kubelet log). The compressed contents are not recoverable as text.]
* N) h0B[!lLڠ2@$Y'F Le6Nbv^Enh&ΞwWzPj \־,PEF%J1xru~6Z{7>’ 1t'K!L|Ѯ(}ͪJGb1%ouKy ~낀ՎI9K9YR `G'%.)"զj;+(V}'T-?CSZU]M&>P}?H5eZ]YĜ 联򂭶JqN8_kiGZ'4OK>RFbJk6ctv1U ւ">R#Vӹwn\zpFk<7t(>=W-O'W_zoc?>Ta~l~^/e}wv܋sO pȜyueD67](~;Zkqzl_4)NnO O4;ZAxn l=Q TߌK!=-.eoZMU|)퇻W!S> 'So-}ۓC)x=}GhZZ4s_p=7fN@f 5p= 'hu+¿Z,.VIξ\Tw| _Nbd"hY=~ӻG;@+1 ^ W4t5|#/9Ujkm#ELnټCO:szf4 VGԒǽ%ɺX%-`;QQ!\bPb`ż@mvomխ2y'Z+#ҨV4WHnX`s՟Lj^/. w*Uw=ޔr_cMP^9\ 4Tewz:a`t3=1;~VClJ%ŖJLOw|wxz6~~>ӛ2>Ϋa ? AyMkBjۛ75h&{=:&\oB !~4\mzy ]UGZKHH2*)u(8TVpn(1'o@y p?V]1}x'Fʰ*Gv|`0J)p@IgIGt Dꀏ )1DaqM'?#aQ.OdnwSL!z#@#j48YlQuDt&.fLo/66'4%YCY"ȴUۉNt^tlC<1/%tUU_aM_q'AD ι+YR#[DDsb~SgmOq"k{b'|mH;hpAb"ْR  F Y]%U{ Ǡ\mb&b/u\hϽPb s{ -!Y6rO/Omd7lv;T >J`%UWs,S,bˁ+&WY`E \ei:uB)%Q-\@)|gWY\8e0K+URp rqf Xi; h%>,~ U_8Kq/bї򨴰smT:h e)ThMᨓA^οlsI.9prD>wOoJ]ÎByNq.eɏvb ΧA;gHS y ¥D,I 7BFf‰f `+C8̡}{)~_ڗaoW3K_1 .8(r4Юt7j҄7WVDTu@Dᢶ @8e!VK ɾOCcWd]ճ줷hbE"TYhŠ}B,)0%6(F5T1dfGH'X&Bn6v7 ,X'Olo9fG]?.&SۛԘNsil]9v#bѩ 99S,.s9*qYeZH#l\ ehvgm~S{a< h /DdeXl֯v>yQxw5/>FO'XΜS\CWwTl&vMݫ#߻zK9h<%//<~~sV?|YE_ZytsA Ru:朇juL@[MH:?qMN~^uBpNccd V|rkI2߀Y,X G%i&&bR &N &dHfEy:鉶6Dc<9hܔN鈂wD1_,U& q>'rYS7gSYVa7}c,:oa"jVT/cⓗQE &Zr8V3k@9W-"\MZ`x`<HJg,"f\DJ ޛ"BAB u4 \ppB߹>˯qHKBU8Xo}b]k.h8d2KG5-ħ &@6*=w=XRM"FngX^Mm7x ~lVZF8rQ>G=^sA"\Оn4rH=>1kY4t% vɛ=l`\tzqzI] )Ƞ".dY@HiA3Чb'BvX9F&C<ã'7 /$ffWW'=\}lE>3?/V7(9^bө#-lؖ[LTYJX0Xqδ9>ҿoޫ#РxZ!xo%xm6XU.1.kEp!Q ވȉ&Ġᄹmf0|#r|vh;'nո>zƢnǎ >:)<VzA$HdiXdL2gSBGJ fCxCErY#Tci$:᭶&H]B9t*EJ؆Wpc^y7sTalxv U-TfbZh5g,%1P%#^uǓ4jRǍ 3[pq'LJ& ? )N\S! $THf0,ԓH q#GڙTί) 9 E8T6$LZ{-Qdw o/!J.ϓn/KII;cn~jz I@;}%,u,Π{ѩ)[iS/X:eǤycQ^i+ᡓF**r9.Y,P2=ޅWcdtG#u972SP FKI~NhjcNuڷS{w)r캎S=>|{g;ՊF1kBoKs$|BrKX.Q1ָdYL\cgZHr"엝9z?LfdaȊ%$'qcZh#m9 muz$E:a\9 c&&ȅ` G&' .Pho0<ճ;cO/y zNƩW+J5mgkw+f~w[ GIӇ,5^$;I )E(3>y3gic%ǝC9$#:A&bԵwuV. $a k$NKLl$+ XevZ/zCK~y^Xl1JE^ uoJG HkTIGP"䄄a?_\LӰԩp13AI}d8b HK&(ƍ c_A'Qokx¾кsm YT~{"ԑWV:8x>@"ڊ7drh>ƌs$,}7q }i4]e9}O.'{J7bZw{7dq/1:́eVG2qtƎYSPOh$<=‰g0 VN>S c@M61nvW 4!>7{[Nk/mq9~PCI3K4?jXW~GI+ǎRF#r`s' +Uj̶oFFԴ?S!} ʣrdt̏h:UW0Qt75Y)ߌ~=b ?~XgyY")`+#tM3^RC6Z Nj(NHH~O?*~߿/^~K_^} ze$^ica?e{1_寝v5Z:աku3]}߿W*ujqAV0ւ]-=]M(S,gj GkenO۶ﲊ^[y :Y/ou\8ߜq."DfriQId,;`8Gxf%giJFpplp>wdU-C.wsY}֐U"pI)Nml$9{ dsdfLtd4*)eTrUBd|U9oz{sȖ< k91y@@x)9T΀ב[RBtqȖOmL2Gc2\f;^D0'дטFTRr~6=(]&tU(*%TjU%D zPв<|P ^gI߀qWR=J%R][;?>?<ٕcU 2 Kߡ0RQ#S"%xF( B>`Ԏ٣T1 ^z.C3mBPɵJa]1{Yy+ƾj|iAG=淚#y"hKYe$ښ$ЖHdD"AxF{dwS-ŋ7<^o v^toiW;՗*J*MMEu.O,f*+2qr-Y-l-bS`/^ NH<$$VH-SDMb-n^$UOüg>3({S+#({š>kCzc筥=ǍJ7I6vP*l-F%!n%BL8Cm&E#DtOl#c+dJ0^8K*T9% KPa*ō,du .ZC%X%r19 ]JdI`J"MNhCuKE3.\ܶ%*\$BH^hۄ< dRĠ^faDMvCvǾxzC>]Ͻb٩rr{=#*z7D?>Q|T/ lvj__՞Uj+oUA]3TinGжARJ).T(FE1mQ6)Pg, /.eH_XτS~CBJecy+*t#Α +bOXc};荝!!tu 隚_=,pQc^$?nJἭ| *SHqb>M <=MW)E  G~Urrz'`bT' YW&aͲ?t9Inwc:yP24LiA WlҘ\okkSe&j~ԥB]8pv^KkzofJЧbޠ-Qr=ŻI[X=MçɮE5AjYQdM-Wwo:Ph}?g-OWs=)5l ҹ@aNozKQ˭i-v6>=yp`?I |m q^[)6o0kN㤏%;p+-LUрIjPtQEg*|Ărn~~|Ls6M˥G|8h#DΕ5dx &G XH-1h'm$p)'wKiDxD({0 $,)1eTFnNHE )y!-T&±9trwSZsTܕMSO{欍AIgcTً@GM Aɂ'sx|yBπ<_hdXj&Zf[B pcLDI m]JnZjyXՖ~d0K/bt0Ypތ^γ_gkZL2ه0!ҿOE+ۉ/хԅ ܛo7Z (Zщr]iqޠpm\6,{sͻpѸl!ky7]=%Md.nVv䇼N^t"%o^[Ut~mn.i琖ES쬎`ݚmD߱t$ I}[ O\}bh1NU|rQboxՍhnut\ֺq(wmWsI렉 NP !Ě"^E{fHIJ"Kٙ94|pGںhlJF j&&cND9ns|}3# xysqs1Z4C⪥%hXOc ]iUNBA[3/,V֟ u8e7=;yR>ȷWnf+h oX5ݏ9Nw2e OZA+/c?)B)G^Ԓ)Q:„^ę'_MI~Py\ x1 ޺LWۜ6<fx ׽hw^gip͠ j9'3lwfka7^1Bd8f8qy8yד>F *7?ތ>\aUK;jDbJ8+<Ύ? 
ķ=rL] ʛ"8L)F%D x ?ֺ#Q)YBAL99]H2ѰA$IJ6 HJ SkK{34tE1q6QJfw+WF}'‡Yؖ_[K,5+Sh1Z3U|s׵ٮPh#|6r@gm7b+"&u'R1 #,)[+ږqԌhcBg%%S󁚏%&RopC00o`Mg4Ax.f"KQCb(CbTg4g.[0|gﻛ (U.U"Pieω ջ!jv^5d@ ;x3ǣON'73[+wڡ$C≉\#"$u' q#< :6cTeq -CM\&2F" wϹ!L-Mђ( Pg˘W10i-9J3w640V 4RCEe-3cP1؏+6_P\0E˥pY(y&^EJcJAz+1K<`?v?ҴUy4%Fq(.*"aY@9ARGP4R7m"|.%bRn~q]bOm-0qjshO8-m[wqgC@,9M MY鏿]Q'7]fWch<&'m9G=OR@>w& GvzCN/C?Nh׾w{:# ^BVk"έY3 x*/u cz3btEп( Uo}G;ܸJ={>kBU+é4&mngObc(~ZqKH}4Rmg鍦ww۳Yo\yU_z2~r=*\?^^5=].lD0י^^k{?h4^$-]jnFҕ,N7=.YG^w^Nn^rUޕeQvفop}0 ѓWdWvOc&j s?oEngikgcs$Md׏8Jh0nN>!6R)U_W˝нB%߿7?|}~~uo.p.^śW_Z'qpJU`?^M[7m-47kEӔ|ioӮ-+vwc !n4X+jWZ5oѫlLU:dpR8#_>{u x$1+#\m_Vno(u@4;Dj, DBx-ќK$qZs%hH9#3$jUpL;$2}G^&\&RAdG,pR:͑Re2HS !TCJQlNXU)Ȏ9:hw"G{Ll)%P,/zAQ.~c]Q'abλ6*.a\-+-ǽ&Rq}%Uu1UnCgB6*Ʊ<+r|O,Pg%u/홤ٕ!T LhIq̇A7P{Q5Uc,6KvE]U-.F\'8l@$Piuc<%μxS.zucbcW<4솇$F1\kVFSHq>TFQ{=nz?>1 ]+3/x9 `Yii= '4_5~ʍLvE?>1{L>a7Iaz'+<1E-"%˭t<Zj:$gY_x ?LC^¡WC"^{LC.;Y?NwWlvW+U S{54?^Y]48OQg@:CI0שRSΗޕq37M>ΛH b5"iO-Zi]#suSd,zhNjFM?|AY>[%q{ft_i?O鶈#k\ M1_duz$rr΃\1",DDꥦ0+Cʘ9TS^tliD<|M@締ț孂/?5O4N=G}w .o,'P;OsIHy{,bt{_ΞCx4⌰w=x_M\;OI*h3P1N o7C'ך%so@0R'}K䲓%j懗[lñ D <¬>.EǸ.V,K,_mL>6ַd*$d7XY1t Œ9K˭I)X2J3%oɭEV:ra-e0zT t\[Òcْt걖W`NF\a"vm/p,'Y\}"J8!qSOF\%rɈD.JFC"}B*ɈD"Z^JTR((Ҵ*nX h^V?KϧQN0ϛ).fU ={aT^PJ-QɴRT[O^3hn > ɓ,ڲp.w]uxBnewi?M6=xV,]}RbLY„H 9Q!AzwNGG;f 6C нsƓ`{uw? L%WEs=?Mnܾzz*0!/0_={i&o=l5~K,4v0~u5TaEo2j aRO cq&*GEߟݾRB uU /Ff|7ƽO(mrcѸ#ShYW[-bӂzgIP% I%.p,3{@ 9k1>Ã4G6xP(!H˃B: Şl%VbRR\\ka +0ieJAd$o3Bҥ0=|f 3`zLπE%E5T*%@QˀYʀ0=g 3`zLπ0 .]ˀ9^Lπ0=g 3`zLπD(g 3`zLπ0=g3`zLπY"f 3`z0=g 3`zL?GLN+["X']Nx,'sqJR;֞iSi0 KM,YTRC }F0#yѷfķ^⮧L(k{uV-bVB`V&# smH!L)Ã;V ap) <a*5(K)2'K[3rZk%% Qu%l0$ 6׏M$)'9nc r.5l*xc^`b2* F̵4Ơ:R :%sDaRqG$Ã9 @}HwD@( Q*Z*͸Jb ``!AyZoqNJqM2/ְWLהkfl8h VZ),G,XmN3SO5YHAƞQ`N:3#PJC$$*FBB4W0 {Htl|"¤YyB "Nt9Ja<mNŒߧ_;]fo*gW[=TG{[1'~\Y +07RLݙÕݜ' BOo%P0 x+:m)X7.*EjxdT(R̵) j1~FPʃ9(q {e ],Ticգt1-NE]Ջ'}b̝"T\~u]NG.9?]O/.M>jh JcK?US3YfW }*#F1~f6EݫdUDsnuɦVJTӑi0`TeE:;gLTT#򦪖ҷOݱK!7/.$ ݴ%&>R FWSyw\uJ+*SLO_;E?M?_}}W0k/yqa/$%) lED\{MM+w47k*֢iwz۴wv}]bݕD b35/\JނoQXi#mmombuc~t0EdU\)iaIMlt߁̍:s`v0< HTE+HR489mB9ޑ$,1yHfyG8EG#sPo`@c(N$%6QK6GDBX#Q"=CBzx,; em,gd(D_'z)^0&HԔ1тFQ4`pH^[umc/`=u g)3sěglt~Tr\|פG 27e=J :RAAꌮvV+sڟM~R {'ϫ It2wZ8LevȗL(%7փ"ٸLv؞̩h=ܔ"D@e2p˽7)xKVsd6R#lyJBqTQ( "lP<]kZ D h5Jy\m ]ޒDŽ.E6yXU< zzp_*TL {9v^tVD_KL%4R%Y呪1BX8(--h9:Ȁ)%SϹthb~ UI GwU.5ȕ}ˡIS ȀohTPUSrZzu0#Fѓ%ϧ&yZ`92j;A`T"J#BʠiR9%N$OR׍%w'Q K_saYK8'b dp`%6ZcBPyl%@,jV-l[cCR#j-hז]>N,"=;җv! 
ݯ^aԺ(SVy0!)0-rN9t47qtrq먩" &91Jc \˨ӽQO ci@'x>mXćst9يॸ|&b_4}_MaB灯#RQL|P];j] Y;MVM H-~I FIZ3X;6TK!բFZVT7MgCzm7$u٠u7wf-M{wzҪziMm5^tzW~SXM* `8`uSEe*UR5m)z|Vpjeٳ7,u˝5y ,;w5ؙ‘켚jKPPnlQ?_؊*'3\oM;.8_t,txwv0>:A?_pJE-êiЗh0A/*L:MA s =?w[btF<:FG1F;o4s ŋɾWN#^O,a!PzF۟W*/?/R"TɜZ|H onS%ͻG4^yN3?oKz&4%j+`LzF0 4wixb=&031¯$dЋoL$04S8r'yp6;/*XNȜy]|z;O8JJdRQKJ0.lZ`0{ 9/mʷK>6_]{Nr{v `W[YR$yI~-nmS 5ŮfŧUOU"eʏc`,D S;G~f<bKasYhf$N:lt>G=7ޠq5ڷ|3LZ(@TwxB ːW/.\K \ t2T`*E\Ȅd mS!WDYuT͍GV*$+Q)qW B?R|햾A% "Lg+)VDmeme"U` r)!QsVV|=cn$*'?Yy*üLX":LII)m_LNKemЌ& @@PM{E_XiJL=Vk~gNelG:7gl\Uufq@G/ GGDx6^ߚXVg8NއJpDDيpp{QJ8Pbѱ#*[ lΰSi=9J9<\I5Js)aDGRs ,RUBe1HkE੉u!ea Σ $#1^c:Cm<'!-^] ~ R#cv.msSck bBx(h87o<_嵲%.*15hiU1cx%,q/O|8@'8y&捞Ņd;)S iÿ?$%A0kVfC~BƒGmkKvoy)%p8r6sڍ|T55*# Y !@+VT OrAd t4zʁfgzQz0G|z U|sQthPFL\`Vʐ\KT 4rAk:%*E TI`4O=Gc;e»8)6$Zdtɫ8](pO:\>mG׫ojJeP{XV6weqmîzsږ8uvIWSׯϓ!f^m]/w)~fumRP0PN :uY\jei%=:uYJ5ԩi&5c- q\ q +IWB9SA·U\$=Pl ;zt%bKQĖhw kep!hxϨ 9B FNK):k >.6tL1&OevQ4EFh;`؇^W}B$ϗTZ%NV5 *c1鹴)*i"N[m<#$eUl5T&gR`iNη&XH.RPZgs [xg guV#{ګ]_w\מT+G8 cڼ&Ga(ja(.Za(}0Rk>8¾CG,fF]equBCQ*Kuvԕ S݀ Hir;oI6Op˜y*)'1ZRQDoCk#bN<"pFS7~ss ^\M?D`eäeä4Դ|~J6whh`0pD xDQJǽ: eC[e6 @ PEJn*%Ny fCّq?(4"Ddja /9j [o$,ÿpYesC/tgPi}C7 >BS|.'x])LJKԮʩY mer:m]អMϺJe[sO잮]׳ӍC-!w̤,]5/rO.[WMb/;eJfN7KL^r|)/.s~6M'l;qAt`{L㯛uj>S-ů sd4ڡNٰlsDX>jbP\_M,J+RQ V,XQ` V4TárԩCx "?r"?:"}z䆋d%&cl4v /bfdwz{a/YG|?LԎWRp3T ^Eh0Z, M=hh,ź -:l_Q8d0OҖp_Bx mt Іɾq5ι)ra hecbZQ-I% 1D,{.nHA`0ƻJ&kktaEeoȻB,8 тU!Puq]jsx#.hi4)ȧяqZGJWz,SmRbNM^ޅ_r_u [0b>CnԽBx_Gj)ѐ'IMH1핺uvtin;EW~C&cVߢǰ#g7^ȓF_vkwwMul̤?fyp]:b=WjoRhu]!6Abf w ;c_5/E~7?Un6!jZ?1#}c ?oh[NƩGG?᳟\.sl^G߳酝TϿfq8?/q?T Np NOhgn/s>8Φ9W5D-u'M,l5Iqu%0Eqח|KP{I|5F~\O%6 p?<$x;<.OW]>_^OB[ ޺hPx##E8]'?_.2EX74:.}??<] OpfQ2:keP&yxFgqFas;?mc?O Nn&̯Mjqz?9ClqxSwyavǤ)GpVJ Uh\@Q4{^hםI/"Fb8o>=l3ib^NTly, vL^^[.A]9ޡW˔,'vy~`Zfƞօ bmx`GȊwjm N3AV'`6̒OQύ7hj-9_uyd((ór,& z$ B'C zRąLA6!"`PzE^>U˸?Q[^]۫Iw'ubo ?ݷ .,g>s+5U Ze"y!: _Y4$S*R* ThBd t4zJMPG2JuNǐK+-*@U͗U٨PKJaF qMDH<Ymqy P졑A~\&N^? =%nƓPkyGtvy=!Wt2m}^W߼/մe.8Nweqmîziږ8utIp]wSׯϓ|!f^m]/Lv]O X3Uv6BPEiS֟Jm7b0?o/x+IWB9SAh8Vq7TRiU*Xa .DU>A #'BH s)Eg-: u)y w6<{KίY5>TruP m+K ̟/,)驴K*$AYLd"c9&BYy)K~䌳"P,qD EaRT,U^ٿYo`T#ګ]_CM]5)"Iv`YeɅתxZ|{Wo+3uRǍ[p+l ǝL24LMHqt&<"R I3,B=9?[9|;3RsN޵#E/;\|_C63sY,p7dmmd#)3qbKlGnR⇺,ůůsԉPУ"i%% 8!K|쬥CB.j F lymkw r l sGa ]G~F'=0*Yl^u⩫:vZ lz/5>YZ)ԮRJk|2xUk\: ~M r b,  sl&f)=PvJ'@cڮk  l2dX0v<6`ddnmBmH^rEA%LԶeTtJIփ48 lxeH*%PsBN>dHѳ!Lp$tA @g8)Yo絶G h4уov~qؔ~k /!4=RހupG˻چS{w0rU/4~hx|Y.Gv-PĊ6^FߝЎh-MͨuUyOy㌭W Cy mupi&Z?}֕710Jmw"j%YlfRӋcO 7mBnֆZ,8/.'ɟ~ӏG:z#~kGOoy-9zrks[(, p鿾7inoմH.oh/o9bkBj!`@g2l,rwhJ8|U0LJHQPj W+0`7|cU $^We&(0`W~m im293K_|N]"nxƙb!IBX| "#bAa%0L1Z  $LQj/hGrI1PɧF jТu{(6Mwlm](e ! W/1c, |z6*ΰuNMQ 4{ ]dmzV}Q{I)cg8jRCTZJ!A+/OߧK],4 UlO P1P=SU#垹ƞuBSW-+((%Hxv@NaZ1ّ) Pe'*&N'uLm3eU |IMͧMN3S>?ci]"@c'g kTTIxg"W3ճeA"]a7mԆڬ<_!o_ ܂o{jxlU,>[qᲐɬ9;N.bHEP.N$8i* 'd|+®ѭdhʒ  brQJy .卭|&1e^ҝFG) dJcJAd\%+I%e;|;M.ّ(>&z@! 4(eFu^@ `6iPMɱ[@QYi25ld ),3AI6{66.N\ڙ8OT:eҴIqV|W;ӓ-QII[ϧ܆S?lZKgN[ĴVM#yRu'jopZo^ߗܝZ#_:*tQg!]lNV"cK)@y3FShcURnf?r%0wXBR0ZXHum;~VvB>-m" a6x,/+>8M H-)gy$drtlW.ΚڥXggZ]tE.ޥޱ!sP .L>*]V)B)ldx]iǶwlv LE 4gs:mUm}.ަQ,*ߟg?P#U;7+KJV ++ʕ'kO~7kYTXJ8ȍsjLJ `6{IA%&oS~0fy\-t;Dy>6Gq@7=s zx3sHvGuYhx ᩫ S= ThvNw#U,77*}TGT}*0tQw|k-wrvj^\{p՟z~D_!#_c+mܣkdX_Q"BT RV?jXhV[Smo5:8 xV^ɛ !Tᦡ=Z mLiHΣ<4$,:xW2<a>$2˼(TZvH-Iyuʅ4 g#hu1F9(PPƒ RxCjk.j:Oj!YܠVʮoWXlRd^) ʽ|l;oыM:C_^=t_6? ~HP=x:"UMټ:%&0WFcs%3rQi Ԥxq+5,>G-Qgzسy? 
G)?"B1;Dⵛ=|G*ĬC$N$W\-PWDI}.*%y]qB:u+Kq[|lҝofgn\xO.n<͟Kz>jn_@ J<`6ъzy|~̈kI% 1~!S*@CJY0N4)4Es>vSh[7]ڇMHO bS@@9]4P4zBZCN tS}%F6l^!Fj֧׃ f\+MöNz/d_ c>ᙺ4 v]#NX~.IS&QN7`ovrRF9ZmJQ`k[on%K@&dЩ23rV0CZJPQ]M|aqH}1_} z6f6K~QڦӌM8?Y󠬻z>;D4٢5*H)\,d'V[FK34% !H`r1D2ҹ*Ց'UɢkĹߊyN3ysŨ7{ꍬ/~~:z#Dq>U9h fqiBrMIժ `2M+3WVj$D4Lf aYIrH{v4]z%8_Էйx}5S#}'m@HR*(}ӡ1 ,>VeXUmMh)@"v"K]RQ-٪V_\U_5lwI]I]` ;eEYWVzW)h*M\Df 4\^`v8;ҽg砢#1`N)XyB*6:Dɳj4%Sі*ht '=6*Cp\)䤫l2's S; t6vFΖk!wL(*<|8Ξ%`IN!R yLBhHBeYX0JI YG߭}YGF5o" y_B/" RKM.]MsgA팜͂o,*yPԫ3>ٵ踨Eq|90*$Dj*֝M F-4l#)21mCT+iL @ʒ젒ٔ]qLrh1-|&;#g32vtqƮX:Bc^Ryь[ܞqs il.,<*?M>Wz庣WhrFd'H{cD53[i!h2AR:Fl 2+b$ AHQ`S_.Ln$s̢ vEfqZ11LwWq(z5ص%e#ccp3=aka=,5ZD) :ì1& 4CȊ-aMB@ѐHE2ɨ&Һ96T3uLj{Dq#f@4Q\tdoKJn+vJ&BVCt34ZI\&D.8RРV"H4IXv}?+rf9FZ@8:$_gg\+.ڎq:K&U]$OG'4rlD ,OA'^pP3cC=kH Ku]TSX⣝8=md%,x ژImS !&k T (+W`@S P2*iAJ&,!Ysr,uwFΖњK>?_wrŻP2m4*m<Ǜ.,K-*:5F.R[s|Mncrh ETsDɠd>e$9,m"P `G#:_ui/d¤IQXZ-R  f \y)XΉtEN| < UR7 V Šb1HE˔Nt$ AH)1QEB! #z/zniABc,g'% : DNX $_To u+H8X;[KV.u=T=׊E#ls?|·4w+^=W'gɇ:fajR%5XN*% HJ錣KGpyN{q9{sJ͓.׃K#j_/nFv>srv2sA,9<=Ϭt3("Fz>=?Ԏpb'֞nD{7v*/I '5F Y,;dg~C2XNXAp}]]uM-.GL6._+Xxj vSufۜKHuKw(zUg+T5yYDž2`'MطYWyF%K)s:/uN|?`G9F]@TiQQd@!1f #p)9f%gi#w"Nw޻wdU-Cv9L.UvQA(R̃ 1(Brv6͑1x:6b!g秇yOyLgdףbz}ՕAt>,(v$ΑW<:''ܛSB{5/>NI?_ifD5E'2>}Os$测ӳj6>k8s]ͅ_~*$lUitϵ7(w뵠~oXyvt/cq$#U~oп^ >Z8>&'d(2RޙӶ,}ƻo[˶,:8FFZ~kr%q-ps_'x^sTHC=iKX18 $ST1IN(|jΥIe_[^]?qTR0&EJ8轲m| *a M\ }ܶy5~7|vEyb؞Ӄś19u|Eei -kۆ<+,7ly EVg(șՄΡE$KZU@xN({P:qZփժBWGME0zdzTVګTRijwe``#)KZKBS^!2_aXc\9W+Yk›ӑcek6X>wr%e*TJ *) 譿GP#(ypULQ1z=:^_wlF0I6htLF$#2 !yIt$!F"JgYr }:\8#A"Ld}f2,Xf)MwrCykQ+Սy,y<̮VqftLg-m~2Z0ON8@svR-wu5QÍVjڹBt}qڋSA@Ew`6Kin]mj.s_C))&*X}5٨2Kd$-ӯ/?4tۓe-[{ )j(tIP"J(I"cd2 M:d꾖o׵|hѼ}F\YTft otw/\Kta}Ө:EZtiL snlP]]^6'O'M"W$!DDȦ%;ER'Q-JYкmZ5oߓMJ0M<+L_F+s$hQpag烓c|M[?فyЊ٫"o튴u%lm%h}ru#+I-$pXYgY\tU$PayO^dy^䃋oZkN̕Ѿo{{9 bJ^kbց6 ϲ62yd,]RL0AUoJ>1SE;f +k@ s ѠEz ރ,a/Pxaz`x`K B+q\@ǓI]_'x<\-5lnmK 3ڪ0l*:_f5Ηl|ܝR{˟wAt.';7Y?k?ukz` j|5Я `/t27v<9|GQLy5D> &I½L؍?y繛0L0n`f(F y+h?a%5.n4vC_7a?ߦ?=WwX |͛Z$*3RR axCL)P{, yoǔe? ز:9}6ܿ<Дf#>BwC4Ԥ'75ӳIizsos$i_ A߁n:oM=T xzw]c| o[vޯ=? {=8J*pbJQK*.lZ`{NϐXviSJ^ygGz?V5bQjv.рl.#j~P9m}wWAK|~hnlB*ZFy+*oIQ`)=&]:Z8QAfM7i ҪH_НN5dDEﲨcċ "ǙRq]f-G[FLIn2Ŏ* [|w K&_2i Ya_9?`4=xBDգئkؚɨ0^D 0?H"ĦG.N/K=jɉ/۪n^]eq|PqRˣ}:9>#.x_)v2ߓ*g"xJ瓎].Kon.ԮzC3JcF1+`)$Ťj+4zc.towJAga@8q\Ij#5VraG5 cI&J>9)m8 o8Ѭ3;2'.ezGuWg c61fl{`nU vX3#[\"CV23%ӚՏ4Jۏ)bot-J崊,GH\Z4Cfȅjx*JM4 "WL*K&Eꔵ,FjD3KD(Pʍ"6AEw9lV bD h̽ ͜;!v>%9drO.8~|(6^d!o㊳/J2+؎\脷\=(`Ym,Mg/rȺ~{L@D 0GM,CGX5u[9w A_9F`0&VN a%!0ZYT+jeQ,EVʂZt$E"X@bH,E $b ̑:WAsKŠ+TJ^1N#E:$IxYe/BJ´`;QBh9*k 2 dDbIs.yVx)C0!(R(,3l1c2b=6MVHz2f΁o7cͧw`krR&Au=-k8ឪү.Sk3vsv>ɼ>g[h0ri M[$00ή^0]5Gg=qkk>S۶ռ|p+GxKST(kó=ŀg._ǻfwݡOMr3?{ϟ >_IYJl\9!W[3a)Ӧw'L80?x:[LiJoN!y^k<Ӭ+*a㢶L lpqVk,1QMP6r u>7lHη }-o?oy,_u9o&7tE5_i谚=@I~3_LG5cЍyݘWx1]:<__y\qq\dfYeôbɰ%7iQh 5zGO+\zUOHV/$ |[,sZcޓk;#Fq{ )ߨW^q i[oi獷0v={ϔ$X"g4D+c`6Nbm6U @EǸq WX1teU$XLjA4tJ9٪-}رW8-rb8yå 3`LLj9+j$^gНi/77-6cWiބQoǥj׽k\gh|;…ls{IiePLl@5-uZ5ӊ3jmnzK7]ekm@z)x9n`/W׷տGY'(VIZ1aU= :3X0^I+ uP'm$kǨEýes w8>xE1Ho]*3F%Vk̽ղ Zh=,711m_Ջ샞<Kan[0RDJ=!$!31t[\|gˆ@)u;y-Xhmp6*h2:ڀm;T KL#89G83).nIbWIڕxLwm+JdL%! X+w{=ͧwM3?=BH&nF]8J;b*(d! 
ѹBH~!$_P/"D*4B ISXj!|;R96pIYʓ @u4D{SPt1wa|Ġo0u!T Iy63fP_$ɏz݁OA .E{n cn` :S3U5bg0AOfXׯ5/fNspvz퐍T]1mL"JL'B.eK~* G˜V^`썕ʧU@4Q\8V0P1R[(Q`˾$`{d~t~9?|x y< i< !,2<Zc3w&f(qi8 <* 9N|0aA"!ȣJ1,,xRysi4L7i1.A$/[?y"yYMz|:?ʋN*hKET9QBSw B"i4LقZCfDQ N:BIPZrR>R .-I%pa"Coqf"8ܞ19=c>fB Ȧ拄6;Ǚ\x7Tf8O[8BCc j$xă^:3ƢU=6U42hOP&p}fh/#vHv0=*w n.s{l;Rvq)^]=C_N9Ț`5͑p6 eHGQXy(a2$\H0,dDю  D9|l0փeAP af~X_t:HFKDZgxwb 0S05 " @HZII!:N廄3!jQzHL~z ",iPA$ #Kjr#x#VlX(3EQb;'%xAJ9`{c&<` *ChXìF"5G;QĊE*%,a1ٞ} f$/;(ΚN]7}+)Ŷ3̧-U GZ wLF冊ȹ6FCBY& L ŔAᬫHXG"\JlB Oኈ0jvexksVd3e?DֿE%WZ.nS0I/C>*:,Xh0.Oy at+ULh\;Mc #m"sTʣ0bg$uPbҡ7jGq_ LwD@, Q* J38APFdb?EP<3BvSV|:`]t[iE?NzI~hW['*IŒ/ym%k)rH 34yMfVQ^E3q+#sjR|߳p=?ִB BH0wyuyD@zI`}I*FB<9tD8Dh]ʦS gͅf?8g-Dϥcd9PYB $&6F{kܻT(-гa̲?럽FIe슰}EOȓRDEmvELY2eV$k:"0; lN|0<Λg(؀ In&W)M2rGySq|(D>,>3Y>TG y61(iF FE"IQΨPsRx́ۤ4~c|@R_tl:$9NP&fzԔTuO~|{6?i>8\g׉5W bNQǣzd^쒡l^Ř?<1ЎCqZ-1~sK!xe3ˬ EY(4i'/y;гE|]V ZjU+ABˋVg4+$5́}/.U}6t8+3՛*l!&*?r򦪖ҷg-%:K' IRr L>W2rzZ[G5U\5UN}SwqTԏ37"뗯^>Eۣzѷ/^~wxoa/im@d {.Svد6`k MeJ#6lHL;HysQ `^rӝKκ.'haVHKl~## 4ƹ`1X>N]D%^r6$Qs-9˝R 3]_k{7P֣ꫀUw:K pԌ'Vl=ZA*0Zܽ]"zWQ.f7pU_/;!>tho?ׇٛɦ.CaCcS`R=|)w,fwo4?dV+$T΂#"].rjV;iN9_ܳ$]JuG` +}:H]8x cdP!{&CDQ5SDcP$%\2N^9 Q Ƒ6;#Mrn֑#% a}Qa GCkO:Ό.UAIHDJRR8drw4#􋷓ۀ/a~ӛilE" #-V݀o{:0[Xq}ۧ Lmb)Mrdz*\>)С."?P`qh$H5qr-c1cNߩcTGK.%w }GyGy28>J!w+`iԿ>zYfYṣ#Rx3ynY9̲d&ՄW*~֘.=} "v7i"Uzm#kf{ 3_Zi8h0ɋ׼ 'T?ˮv0+g:w7SR>D$!wN kQE{|TqKG&oV*^c(di]OGJ~{IjbF )#7A=d/jͤZ/KJj5l2kWF *|H*D>! DOEGj_#R3NJ#\b%|UG,_bB%qYQ#us%,%<K')G7-_xnLtԬ:'O p)W_ٰYwS׿@ n>z-IS O'FZKA fd<j& EF߅g$W #o<˪׃ z3է {[ +E(rAȗnyք'cS\2\ *p= T[|AK%QI;nbtNm5bh+Z{!&h\%g\QNS\D@sHn5RkG[=5nEKS I#`!r2p˽7X2 Sj,FjD3u)Pʍ"6J:2,"1rb@Xb_"D4v}J9C% Cԭak,O^bӄN}[׀:!#AL^a^r x UX}s&ɕ2Pe4A,HY,z%^LGu22L230{-#chnW-w'[-`Y̧(LS$zx0YUY;k+h.=ߠ‹3뛝4Oޚ3ff޼r2Q[9ͳ\KڬtB5Wj}f ~]LoJP*ץWYnzmeT#KòLk4p'Hb dp%6ZcBCu~ѯXŊ5)8$0tQFv[͉/u!n ]4~g_Ժ(SVy0!)09QM f9L:jDjb:Le Jއq{@}p6G ҽAO;wF/ǧ?YzE-9%ǣAEG{I%1g3UaČQq4q>MX0E/pOl9=qR>LEY:)UMU%uЗɴ_3`*H:0hrPOZCm[aųô_۟uڧ) gQW~aLJl䳃oA-f ir͕:G;`a$7)0{~6MdEKirY%HT):RrDB& ec Y3-rKeǡHLr;ɰyz]eiwfl #]3wh/Or!-fPNߍ ߲nMW_Du/>N)AisE-A 鿿JT1}s̶") _Z(^uIa.n!;^ 60HV֍,;<~ŖdK[eʖfz>xluYdW*{exK3 i;F杺A! 'K¹%.8mL2JYQ(*ek8z4ݟ2k.s|ʃ<+}@yImuXorO3(yپ-2bQC⣛]jFbmXQEd}ܐ>!7|v4,vG.%gtd.N N^rƷs'`y?і E%s0i}wk!&c@&xaE:q2^h!Gd2} j9hmZBꭋDm F&F#jI[Ύd!ciI:fdRW^r WN0"5F*ydM #HEkҿdquW9T 2j3d8`;>Tއd8bl*[:~)m"(p~=> XjFpNmG 6NGrR7gќU@C:r:ʍJ7QGdELJDGC/遽]2. ZiW8>3c81 hHxIA[c+v  t\,)ϸ{ge~+g=gUBpnן.k~A#F|+g蜓`۴ȭޏ+zw#ܧn R dtBRnD|; 2VJe9=ȈJ@n9ܹ48NN>q|dŒ$|ٺdk׬ @M+V/ԻAg͆#馢I c{[ƪ0v̏IVALW4W׋WkuRЊ8C @ȍ25 E3ݠuÆt;ѷWH^%r1}lp WC2hף(yM:|\j -0iE#/SZE1~n}u>*dJ,ZҎpbc$7?At$k=ìʏX/[E+XVrwc/M3ݣ y$F횫`QDR>)i<(w.G=Ԫ;5a7c,}~v TYm}IXa X~e@kt9WNi񈇺uNjSOq8_9 Ǐ=Czhxiҩ- ͸;qRwo9]@BJ`@cw~^ثWn(aDTriQId,;`8Kxf%g Pc=KҴ!yBJ\T8T:-bl$9{ 33&:N mbQ6MB0RbPX-T}կĪa8ӎ0Lwnv6~7FFYY# 4>@nJϳ@G(e/:#~ U1TCCt)1y@vKxY 9T΀ב[Bt2VX}*7u4\ɯvͶ7 0 “QF)T3\rjαE$[PA]7eyJ XGM|J{HFJ iZ{)4X#8k%Ig7( Rp&,^C^+u ;uά~enQ/@Aڑ {\dQgڄV! 
(MP3+2]BA[0:VIvj<@:qryXYb]h'j(xi܂[|W|_zi#:p(2DH8=#RIY,& %Y%2Ȭ:»@o_һl`w-OĒZ5K|7odh'mn_`èI9ȴ:>bBce@&N@Xn"1xo| !~RpyDK}πsTx CAnN{I5I: Y6ZĀht8X4Tb\)U\f!G2E(LeX-+6C*@̳ϳ)|1PWȬTB@HcLU` 5`{AYP׾CŬ%[b<:;Ik^~ "*X!tNq5)~V#iAųIʗ`)N8I1>n%CJjܐf<ܴsIZセ'gmn\xkO<֗%kwKZiϗ  12 q*֝MF-aO[1p2% @Uh%cdq$;RBQr.M TmX5c=RMV]u e o)*s]d_lݎOgOoişЄh_YcZQ <7FT3CA1ERVS$)J]e-̊{1 )ڔW(ƒd13эDzNYƮF;^a<];Ek^kvc0 ^.,>bsft &eRY ;pI ec UaƆ&ы!WdE't KFppR؍DKdTi{:V#i}˃ȢE#V]5u{D#Y(N2XDO"1%D#@`eV.B3f,!ZI\U"=!&РVK,i<,WIT:N5rf9GzՑY!:qɮzV֋׋^Βg18:|"6!.1袗YQ^jܱ>tMAݖlhPy3EGnռyF1Zt\Fя륉l ?Jj\'Q:JjJJj_a%52(&|*)XȵXUCWWD%^]}1n J2{su,oGJM7QWϣQWϢpsX>C]^]Xp$r ֽ% Vn=p>{i„0^5r6h5wty#؀,kƉ&p}mw,=ZvLo^ƥQGd hыr] Kw&4m 7b w& }ZFY.Tڶ>Ҡ/A\2A/TNvyA;?o ژٻ8rW#@vi| lA,`%|(+[^Yw=vKrђJQ ,u됼<HXUJu,ÆQs0)80n~Zu\t"y;]f5 ] \G+AJPnY󅮎t͹D BX~>_`s@W/ͬzlw|{G'9Yf{~1⋿pKo"}-K׶h-TPST?Noj}*o'_o9e'[: /^'^\wRÈ6|_!pZa7o_7q=45ۋYn8 חg %\I oMU@\׀ƄO?˨eW|zo(w|'7̱^g㻛9y_]wwM7{+9 3DQvH5{ )hRfRMPmj"՘L4a bbaJӕ @1ҕ%gH.3/hϝ_hFz;K/>pɺvK/Nxz)# ?ש8J= Jc̟ڒ:s9jχS=\%R$w;éY7jPSlշp@R ٙa jVܥnjG(ՌwnK&6CW7 32 a#BWGHWHƁ ICWךQJ3wJc+fq8K>jZ/tutN DWlaiF_{Oçӕ |TsXZ7 7ݜ8J͠tz#M|փJWx  3v%h篮I2 ]] 8qJafқ/3aMo,[]ii>C+O?NYiҺ)V2`ݜOX9Br\.eрLk`ʃ5e|ҲÜ7%8 pIDa&9#V=m]c/WL3l' "$̝ ]!]e88 ]Ibڛa"J:Bb7i+|^5AA{'Zxʒ&i 5aJ5 ] Z;{..tut2d8cWQJк J]#]y 64Ύc׏cN{2؅OWp}ѕè+ NW,2#]Em{Mly);] JoMO/ugS6Z[:oy 63M&=fЂCPL0MiZhMBtnvSU_ j^p IƆʟ>< 7O2N󧠤#̟fD$8t<]׳+? B,Iz#9lû Rds'wv~>e|> f]*?]_xËA!_by{%66*Ko{7"&]נwɺG:?(*݇oۻPtzuq`;n~.%FbԿmޥrvq&o88k.}ꬷ{?s= =-qlJs~F(޴|kFVVk*rFu* O(ﮘϳ#eG?g5#smf{# qȷPOׯ?HevӘ%mUϯ.ʻR[?%mi-i=d7rdL.GvFF8*vB0;o?KǫkH:n{}_ݜ\ye?+js4mmtOT]B٫5`g-%e Z1UuzuP:Q5fOBJBnV[rT{U]*)ڎ®& iI}Wjeߺz`v6U&reN ;r t`O"TrZE+\ #‘4(FR)mv>k-6 O d9;[H]ԻfmV)UwPRyT}e}hƮ#D34fEԍE^3.KNI9_ឈ&3|^뛹VwG@R+y݁6l˾Eʰ6&]J'M9w4F?@.\ʚT{}bPfgBkZAQ! A uQE~$iIjWSiYR!]AI{qA1#3X2d]șcq|}asἹI*U˝aQ <RIITڪTw$# WVS vnEFf8mI!##k$=$4H/*Ba-rՐR<@=%#Bb !cMI\=D@NO5/S O ꐵ5S&8_c!ه0%>{(R.՝` *`5KF Ȏ ўH6d#oG0Lc*yˌBi. 'XEA)Kh^5TTlPtԠ-;~9xC9v+`5Ge^ @rܪCZ]He1ltp Օ]`גul U Ͳںzֽ>s%ps lZ( W lȹIi1 k9+"}BE(kGU((NU$ԓ.eX|/H1O`K(J J+.@:0؄WwsW2}YB 8(5 7EP TBeg d'X22*zbw\͐jPo]\?,1l aݙom> ȄكHcgrcEBy>uoY{$NjBQ!6Ks ` 7DIppR 3ˁ .QkU r#3A@'fU5TTgJAQ"Ł.͑vA(xkͲ (EzeH_u$Br!صr{vR5QrUuA"ZZ.~=#BJucHH/gNBjS"Z, ev1!o iXGʩ|p.#hn!0(2~Fw3RUeĬsDr2|(pl^vN$ / 0`7VvaUzw&O:UݸZ5}@6=Dzxs:pnPmJo:@EVLD$W{HVX$ETXr z/+XX茸paZ R|$ L&rZed^]`5Lwe6/xOy L}򨾬$kdL5E*@vdm޺ ,\߫jPZ5tU;k@6Z ×ة`u0Ovx} ~'KS t3z B&ȈG#A]O9e !/s` /wK|̡8#tϦ_I 0ePwExhM{6CܖSelJRJcEX ٧ "+P(vwem^&$˽eg:|lyFh $"2Б5;1vwELML((ͮBUqwqpD쬮pj,{a!d*)njpF̪+(.Vɽ4K2BךEu64IyF6@ pf%nam@\+w2VUM 22^n$Dm WC%![[&wh܃Goq .VtZOӾ?xn9׉3IV n=CnIf=)SZq'Bw'UEEԶZS2ךB$K9OՓFCmo1FvPNjjwkʌؓSC^":$5\ ty݈衭D{N,WR SA4wm[4&KS/7o ``fb$|Hl<&)R"[ V7DZhQfTխSV @0KK =|B +҃VBַY/'2 X!,W"&ꬍ''.m$L{]5:@GVbra#w(iQH.|򨜌 *``ݳVu U @x҆x~$!ɵL 1׊#[;rz l$V|0|$i1iC !TCx ie0w TbƲ7dBDHЪpL(Zx7XB۽bae#՚A\X/D4%7LEvDܓA'7QP|LzR0[bix…L`4FƬ\+Q/!X6=@d~9X #!.H'h4p˔|D!B ( 2dN @07BW^\?̛;JUQ}) ˼TJEP1C:rv ٛ7"Ƭ ҥ!|DSL?d(PDrHf]1 a1 /&ES(1eo!F\̪~j2CI|?=Lо|{V8Y_P2%Gvqi:. $zfmּLPǺyQaܶ2UBͣ)xMuѾ>U,H.9&q\K;RwJ tN ӞA': N t@B': N t@B': N t@B': N t@B': N t@B': N tx@SRw pZsN ,@2: N t@B': N t@B': N t@B': N t@B': N t@B': N t@GbLK&:82\'PF;2J# tNlAB': N t@B': N t@B': N t@B': N t@B': N t@B': N t#١wet B;%c:F'䙣@B': N t@B': N t@B': N t@B': N t@B': N t@B': trtu?֮Rb+t஧V+Z{40R+6fe(- 1/om55ޘ(K'7NͩZs,)-)+(Sҗ' Lmz+mf`.Q]QR{1s'w>{08 @=u:c1S_l0B&tkulb4 Q!m>~POS1KU,d6麲>P{OѲpx2 k,ɉrB]CN8bax~-7/r{rE`Az'J"?^LTTĖ^z[V,od @ 5RG\r>=U^N%F>LlӶ"%ܗYUZ2?U10!zVH{d{JI, 0'Oh|8-MޡLSI;eZv%C2J.1;LQn]1*;CW ]eJ:]e[HWCWj֩urIw3\FBWNWbHWGHWź2`K:CWTt2Z-! 
#]Iɠ]IخЕv_(9A:BRPեd'x rڙd0*l,U^Y]RW]\h2C{XՓЕR.+Xbž*GwfKz~:-Y[tute5t rBW֚_jRE2tGtpf j}zA {[.,`4}?|[ CLU{4G~p*|m_b!1tfet@]򃙟?ި M'1ܥ6H(w,E] eEKR / 2p.BrIig Wٮ̟e>r&8ɤdt0g3t}@ #*HWHW0)6r 1x 3z95RX'WYd8K+(u^4ޞdc^뙽=)y3H qԇ.R;,m=M֗C]. }ڴzB4Gh'QxϊjNōlr0iRyIEInN5puiu?8eO;.._>wN gyL%[}BYH,^Mxo|l ~_gfh/i|`:֮ }@;=-lݲo '-%CR-V3R-5+R 2]eT;B}^V,3\әвzP2tut%ԝ+lhw3\NBW~*J)l kup ]eUFiftiWDxg*}n}t(HWGHWFWi^ q.ӷ'E!X'E͡7r, R\teC-4!m?'38qoDᐵֲaܡnwr(Ǚg Cmu}YmD5wbq9q/}Sm|(J UX{unߞm5]~}d1W҉`UɌpėy ~{6)ySҿA8Jg0~B9 ĺ_O~*fUn~ t):.pׇ+ze95yWxknu17F'=!zV?_&KͶ5O~v?{ú0{)*ȒLk,q8(uAHP)V9sڄ5 u/ebxS>7KeLp^^)w ϥs.ERIuU20*潆V]Ӫ3}i??vBNWܟC-͚"3ssg_]0o,`d fQbg+79Y[+rw ZgKуlI9\iƣ"85!\)rΪ0MN p4Q~?ߞYoo.[zQ#IgFY~L~0|K{d!V"E3A@7k.eUDx PeG{H%\|4kv,$bv ×fK{e0'B 뚂ܭ6m*h;LLkZv]Y8Ÿ/~Xi˖[.Uz*ݽ{!\R@>֤eNj QG!{EL0JQh%TIܥywJ]{I=v+SK9r]Z~}=H-n{kM,4.p^(?œsxW \s|獌뛾y30|2[Έ" 륀ڐІÏFQ[{,4]힓|h1k}j~l/_] Ь*_ufegxz#,բd_R̅ԲY,v];SY؃AIUy)7b ).+x[4ޫ|k^C^KFkTw](>|ۻ9[l}.,_ Ծb\ 'N8'%mPLHjcFN##(G7j!+*N @r7傩2,95@s{^ //+CSBy>|$)&9g+aتϕ^1N‡܀=$O-cR PCƊeU&iԫϤWf}W-JW,)9JRe*EC -M~m"v֯)>lI |_Uґ]>i'CV$+;2[;ҺmoHtxmgL/.W$Tq#< T)Q;/28 ݫ&. ZSeDW$L@R`΀\&VDg78waK~}'#Vh˸kxqӻƹya[},G;JnȻbF& )J,dMb,Z@ΗD\|á$baϸfkVi-9 H!J Iee+m#IؗNIyD^ Cg{y1<"e)RMbQ$^ReYQߴs'C@-Uz*Ta8ʖ8|:zce?p 6%'`j_*$#)%BT XWgQc=1$o`{"!ޠBf2.B.׌9"ni˚iToH iS{Z8O i~Χhb·!1$N&## M5[YBuYងk?Wb.cM ""xM\*H)6?NnjS$,g`wb+bC][bştYiOmϰV,O2x=Nجl]3n()V#I&ήم{3?e}> /pz {9X\/jtAM׮M#/*smIZAwyp[(]uXw7+kKY$p93lW]xOϊiC,Ԣy=f_p?G~n~*9/ 20pGË++͕] y?o-zחv 3E3Z?ii8ͪJfp>//fpmqzmuMnZ`ڙYg.-LH}2DL< j&Z?x+/?0N?gqB~5*;}ژx MOAZ~e@h2n3!cB*,UXp8<ëd$oՏ?T7o?_]{^Y'/se&VP(Vܛ_ԖZS|˩=檻 ϼ7J/Ӣ[ 1hpe6*]G%N<w0 B$*2*k"[d&ƴMtȺ+9T>r'aOGҴ唢3#$I6uUi\8UD-9gg̴g$S_Ty3G 6TӉ[wm~c !Ogv1wN>}Kw]җ>yr5$;^LU։ouRXbY{:ci/DƛV ?Dy)9Q#R eH)CD|7 }Un+^|z UƐeydsȸ'%`If<f6!fS|2V Jݦ'WkAZköw 1tV=9"JiOFr(BdR㌂&Z+B|IA*l8 ;PzZMЪGNxLƣ6- ?jx5E5WaZ#6\d!3]"kQ7l0;th# wj%NӃdE9@tD攂'MNXKεdըmKX'dUl=VYXfSB>GOəeN@.w*hL綒}JNݿN$>8{9DOX, 1 ) ܾܰ9ކtrf,Δw&jop)"e10i# ѝu Z qGl-21ꔪ"(Tڔ%AK1t &N6R)i j#c5qv#c=R iơXPXXx-QY}a½muwY캽?M>/dN%Jyb#ߒOLa DK9QHV [2g'dBR4$#\2|;f]&D3VFjF0TPv jC=ݚpX"].I,x .xRs':aSSYd`BAx6>"fȁ($dQ 6j"X 1I"9$Z^ٍS|[T~2"{D|$̒-;E$d*L7YC$7FȨ:, .Ct3 1 $wY. ΄Ԩhc$O,Oz!kW&Y#js_r6bѭ}w\LY(ճ'JesWncc%5@lԨdQE@7ZHۀ"SDF>i!۹6Wɧ'pw~朞!a]oZGjw^{ӗx1hP)MI{+@d0{|cӿL1Q 4*YWWcu_1[l;߾f?q6>}n ; ;}qo}g]UpV8ɧ{cj *N:n2~b$}`#>gh68=BdE^+d-u٩-ӥ8RWkqr6 s#@2e'P"t@0)QࢷG'AOo'NFh(3E9`?sLJ y{<>LZätpZ==xwJ9~FжwQb^4^!@n` h}4( uN13dc%?9OF̝c87-wY% }%5W2"Omi6u<#ӕ}9f(b>^ؾVU?nñwǙQB2E"{>躨#(qJ6TbL,OAW^$K}vQja0+ VH*<7`UWFS,1*n/R(IJR<\3d `% +B0Wm̵sB7 Ia[)c<}6Z/}SgsiiT^sj+Ҡo%dNF=\*-G L҂AҖ Ph tI)%| IJz!:n1vB쎡C׫s#VfE"}2;;;#1o̅m7 ݮ,><]lǠlVFðjҙ ط.qoN=x-+gnׇde-n;;ɨߥ{ze;rt|wV};rj. p/i9/]9NW5n_@\iGagا䊠,x(ٿ38={ˢbnۄ_9M@b4n??t2\5D6\,F>{2&Ō!#TΜZb*$𶘏ƃ4!ɚ+*/RΚ9zտL=V61z&4%L%F^zxU5WĬәjH'9̤oLMӛN^0cS8xҵ|s׆#bXbGoU7[=P~K܂y0]v: z8]FJrP3}%9 B 0HJJϐṰ;+`oGA͗m#SXC4.|M3Kթ߅]XGޑKߣ1/c0e͕Rjڮ/o ]R̘ˮ.Y+55e9$%# Tpå3bt^7<qaWS} \Fw*d: i7T'kN>;Ԏ]+aVIS!\c=Q;ĝupJ EzFt)r)QW9Ɂ2,`SB\;IоtYxzGmuB 5b,)cerf/L\'MV05Qa&2`"#8H"8GAK|=o~אygq%dvUQt17P[Ck7F/I}cU\3 $:$Fžjt)IJܳ2\M\ȉ wV?$f];jWO0LUvjtyF]|dVbG˴W27M˺{Bi,ßeX ^JO} Ҫ\GFQޖ_.Ӟ)`Ÿ'0b>geQmgp_0ivJ.=LWi&.íT}%\sZuʘo 5RTMr3<yӏ/D= eΐeQ(Ui:M=0X)C1d5m !L{)c.pfLP3CnEyM,DYabYѱf^,ݳHIc 3` PdQbC-(|B: Ş19Zm/]>}c>~. 
Lh8FFQqeiQ&fD&4wxm'0QNnft~K_k֘߱xwc$5wm_e_\YdW* \+Ce{e))&z㺫CBr}g{g838j!xp̔"Y{~Vr&$8bfK"԰ ؂gA:a \C|J}@SJxldž]gu Ȣ#bj5S+t`~ua}0EF\k Pd"m]{豬Ud$dSHRrIJg){ٹ3ًj^mԟxlZ8LevLQ+cyd?\Dո5vGո;V^tz*| i$,D"TnKEꔵ#˴LǬ^aB)7j[c]JAk{ďs.l9~܅y *>'(2\)siHS'˽yٛyyeTHIzxT,hi1UD˱TY#M!K%RKs;5/e&#Qpuڹad,[X$"﵌FMFS^߸yٙ8;f b>:)G콆L殜|NV7omM͕; mfz-c&#Yf ݬ»]N՘ حt+5!7V3f'IvĦ b.&o3a\C;dۍmӼٮyyrL+gnևiYn=nY,M"nW+ӳ|M]2T+wm~{m2˻?G?ޠCrW=7$\IBq:%OL.Ž~ O~t5gޗ,2G8 zH;`A'jQD\_cwjQ:="RԻUUI=0-rk',L ňFDK# N pEɡ1L tQz`x`04h$EX XXm)<`dwNkE0!2{WvR[_:{z|w@ ҷOQky EMR N@jGxTh,<&n}=kG<{h V-\FIcqJDsDH4 @\*zy (8M 8^<2rǞ Z28DC\ k(I}D߲Y ~djZ]&ԝ=.`⳻K]xI7MqJ2eH "#0"#*FAzwNj.ѽ۸W;az1*L—W_Q1xྺqh(M,izF WN3ǣ@75leʒ|G .]U]?Kh:ӄ$kH 8k\s۟~T3 [Д\W`]ᥧUwE@:֏xHLJ;dAGzJce$9Z ㏤MJ3W2h/YVK`5>LJh] T`|úT`5;gk+뚎wd*-ݓ7<\f4\_̬q2 Y\)家U.͌?RS3;]@_b<N8\z|]=㛟*l|(FفeJ\7>$'?f;>kb'Y}e죮%t..׺W*5geX^jSStO>uBVR&DD&pͣjnDh:vF.v$9qu>r :| fJHsMm< !DqVd(@\R8JR|GIJ{gHqDKoU/k]oEyw:ќk[ \e9h@{%AOdY$tjMLnvmJGQd'J`!)(FVs{1(:D:Rb&y$<8FҎY@Jy>%Kȳ7;g`%QND+}$f1uZ$wk6}%64vN.%U1Q'.-.‹Vw ."8jЂN'Ja1*0`y^*m4U aU8Rn+帓F5BguTo8}M)d9ɢT@}H Q` tEaXB49p4bOCL)BqO|wN#T#c" >(`\0"(LV-G91LS6 "7:1R><&ޑ֙Da=(eP%@mA>0wa9٦c N/"@gn\1IsJ={猠s%zpVpcn9H)@ R̄z5`1VYE NEFm#GEȗm;d`>e7|`_]d#ɗ8+^,ȶw$Zŧb#}>S D%o90 n +NR;2o9Y_vOcݯ=g,%IӸgYS1p6^& Ǟ\H΃΁n(B\%;4.\Hr[B.$@ HJD'Q,bJi!AQ$MU lv`#7&I*\8I>r|9쬏|]rT)2Ipf{! zuAG&9Qr%9, )Ea],Kuf*cV2 skH6e^_J3MjPPڥ{;L{$}x҇n(^g*#=b(mu?Ԑ U+ V^z[ej5G˅o:ۆOD4ᥙ.u!Kǚ Ia*,Oۈ1њΙK"+)5Zݓal7^VG軂A7O骰t154MB]˧4}F 1q'O Gmsxe%`@n[n@D\w^W>scjߓr rv 띲Io-L]L!Ryʠ<ԋCw:!<$>4ͫBfV !|EUYsU2 ^}W!?y`  c8 }9cgFJe!0dy{׌bv߁k8۟ѴukF{-GmKiC0 ֙ mLضgh#>C &BÞ1\;RBĀ|z> 6jIbWTmZb'tqaKi'Cu H^^ ;ݾn Qܾ-|&PjiIumR^joTqvUңUW)@OKC~RO[gǖr"CuKCNlR謰\I|ԘԐ$N~m)ݳxneaW|b{Qޫ7=u| >{ªVhL4I) yiR!|ѴTV(r2o3w޾;yM|Ո~dp5{ӹC37ڂP*rq!ݨ 2lVi{3sV\TKmi{慦 +ړa/,|**<{ ģO9Fymbɛ#U<[C:)rnY*1٨Sr))0 '=5cp[8i\]:+NG ۔ b˪sw.̝E"&,&8"a 3:$ BYe<,kֿKH0K_hCYSL =- B c+/\4y j:և?IicW^8?4gs9ݛ=so&rt<^Tr j1}`&$~Lͯ\^{k=L{zzWNF7| QK)y&L`% fug\tV;-!DqRF޳rzV ?(a %mzɴu0ޒ2T96fr!6̅£rQ(-۞åf̋}/Phe4|eO*8L*4$̒zc)-d >r˜Q%Z7 30"# Cq\&6+lԙ|;fl&܉2 4Ħ)83ㆋn:سv` ,-`N];2s#f I;M4Aj_XQ>RiW:!GH&&$0Ap$r ZY0NAd!ƶ0bc/#ʆQ3r e+ A&Hig&D#AE6ْ$B ΘҤDT(Js5B.<JkLINs %YVihO/Ϊj^gcV//yQ55x1Zѹ`A&.{@,;[A%{^< /}4̇f?><o)ap9n`'wUFkp\<яȸ8B((M__Z|84Wr]0Pyeup=!L;$p/|G d6i]OyHaqG]֖གa$jC&ن@ GH'T:볃Al@hAd2QϔBZs`QP Pxczr͍B&ᶚ>js,6-h͵(2sɯ{ȚS CEVũmEcVۛymchrz˴\?\yA7W>{:H$29TܧL$O6 nLy05G߰uz Y/AW鄊JT"6fcQk4'G !)! ?cvz TKHUgNF~^[1 iVHYkEc˄Az 2B0 bobqnMYHN{ZHGw$BN0=HKK, 7Heg_H /}6XR3# j_%b!| (dQq&B~+H-I'~-=ƞ E-yB_ Gq>?W˞i&_)i".5/w ˛rۛ 3{MՕz) 5_nO00NY'ܐ;P'TNҙ]Dg'idFl+#)yʁ& {5d>ӏŎx^s>G׉)`{u?d9J4?~[~_Eo\]TPFpihv&jp;&OrMӟtTkTm7? he~s}Nj o7m0X9s_<~ZW#QD6b^gD{ɍC;J--lm -mk7#f6M=-ØƓh(;d<\ p~_VꪓZW`u "VHiXc2 /f^yPv_>,Rhðk0pJL T'^⯘dQM8Mr7Ȁh2=[auJ{RXtq8^\'Z$߽w?~/w?Ҭ}|o?GeBI  `j M5M4Mz9i6-fjY)ĐOl~W_;|tHN8;Jɧ<dRˬ0@6eS&%ccL ,9T.p E 'qXKU/omuXEt2f MID#"69[Ig7< yb("ĞQTjJvERp(z5$H]+;+WUA{pUAHW 4CtEU: Zz"^ ]iЕ ]NW4=]@23H vg+tUl;]JTOW/, Ż `+鸴 cteQ*znJ=r y%7gn8R}\O4jfvIht0oxC?ۿ⭑FsD8T} 'J 2$`w{I0<$J"iIz"+<hqب̍|]7&CX-@-bXj˳/qZn|e NF !\?Qgh"gQZud LTs@iYбw:6"qa*>(^ {>}o˄?+Z-;]a+jY%:*bJ\cD}Zw)*D Xͧ|Z@[lӬW愥M8 Vd^'s, yi\OZ=j ` 8h%:?Eq Ʋy4*Kvţ)hkGSPj{4/У)+(;3tU ]NWž^ ] `e ٝWw ZNW0!"CW]e骠HWRi˻ OA]Ju Zm%ҕҜ.t*p ]ǾNWꃇ^"]im88U+;dնtUPZ +Csߩ+,;`++tU2t骜@!sݝ++lgl Ԭ#^0˜1`Nw'%ge[oe D/x|: *:qF߃wz`飸~]) R. 
YǬ+c){oA I5(a'\|戢Ъg( ƓVnнnw)%L:DWXJ"O!"ZAP%zzt yzE ]Z Jk{zt%g7 .[+hߓGdr>+5^ +?Hf)^ty^<š.]moG+>7VwW ȗؽ6 ~H.IYv%RP=({>9fMOUS/`$rEN^قSBTNj_ˇ&@_Tˁ+;0n[ئ y51VFd2Vzb㔽k46x_mi(wQE]l%U*hLUD#Yn'Hݲz ߷?$Lv`_󵖬_XA6Um~a0xzβ*R}Ao{ލGދz_ʖk>8%WcIZ1-Qx=:.9*4%LT zd6y'=$:*$iD lH%f6$q \8-- Ȭ;vFΞgNJ6N5ٞ-FQW9ސ_n6TIKTZ9Fͳ9#Պ7zhnާΡRʛVH9rC۾WVzD%''ETtLI,`Pp0  @ MJb GѱwFB|F H$@}*kv?l.%lJ{EOgsZ{6o,u얾>}]%XݕZ?yc<.TK?g[ID$*8]:(nӞc;=t칃hiw.t221K>m%'md qV$Q$.Stt,>Cv/r!X"XL '!XeHP m}l=cIvN;#gOV(mAϳP2$yhp!rG jOerv}7G *%1A[ȑ)iwR=|uU SQZޢ,3FcN:2u ۔l6hRVw, TpU{a9%aJ ?bZіgZ-崅Y!sBq?,~yF9aH(,7 r,㑰:-]$ {N`_'Ub{^ h}PMB8j1~,\pL;|D;/|2^Ϯ#l66kV=vprwFw[=d*e&+-r90^xɕVbT*}D.!;ȕx}w +b@!=$&.ǰHh!9NrnSP,\o[a+ͱGG/40Kt +Y[YPPeNcf@9qڇO;qAtڏxôĬ r)*IeҿmN 7 +CP #8eBٕ\\g* yZ"CцA:3rG }Bԃa;*Y. ƣukT|+$^·ࣇ/1 8>SF]KҮ:* 4+k A4𲇗=|WU!Ћzm]x'E='Z6&r'iI|u`H5ힺ/4ri9G&Z լ'n]`Oޔc浑a6ouuw=q/鮎L>;Aƽ^u˚_ 7%9zF;}rm͝μQ9)EJ^K(V+sJ+Z_c(Ŕt D[ Ёcѣ:3.ZpUA'PbcHE8qZwu6 2]pq9ך pk{ j~j[Z?C5AT㈘}tEu%U=J{K3\LX/~uWtJU8̣0.3'~.Vv;VR_u%ٍ\竤&pu\]9@BhK^Y0FF`zpPxa^1t!f.eF)bti앐 e>D,֯8pJ@^qp5yW>[;hӕ^[gVAmQ0ux8G>ܥH8kn}`,h~"W{7c)FL^EW>+5nODw9et#ͳLK] #Tn .^Ƅ]}$^nS|+ΆUxdU\핚Gd4& %4 DO{?x2tYs"C=@_Qgp XTcMIFJI cNGтb0.N<_^QGo|紛$@,l-$Qe$Nl(uFd&HLL1z׉+j 7^~!S C\xΟxJ;VճY%[4%oըv) R}2dIUAfS:}qre{siϘt-թ%]S4,*_}Y+>픋})Wۣ5#-vx٬Z2Rl 5Y4=2!ZCN"'3,8a|Spo$OÀjvmo¼  u颰#ZUr4V1(/ӕ*Yx^աIY]=Szw} =rN) Th'=Ks>8qƲ{nޛ}}}kSoN>*[/q&C>I'}h;OFr֝OU˪UArջ^ *+2W22W\!J  }Nynu=dCuݵ-GMYbhmY#w!$L̐̕tkڜ5i;6LL?}Ļ0xzw=J8Mbssƕq`۴ȵ?m{gxr3n9?/zqOs]) IPѭ8[oJ_fM*ruT\ŒFg$@!s>Vm]^#lN\;yIzsc < Uw,oY_F%ګx6Ų)`][/dY:tnY֪|1tV?+O75G#m"Vo0L M#MHyr&:!R XNȰ@)mC49]ʀ&\wTe/\FqBw"'GZ(Ywޅ$$[L5]b9%ju٧{O r jkǾ~yJ|W熱%%/{a&Y ^Ռ6ӺiM _KOBӺP_cOk͕SszٽZK\sh|d]oGWQf[.>2C9~ç$RR#q#-r4*PGf 6SZD5=ߓZB5=vwH]svWc i'P˅U2 aC0&1OHLbQZb"B31RH"aFseLj  PQ\K6mm<畃U6d Rp'f"Mܾ)}ݯ^$62-h?⦳|e"9 * x< O)jOI\%ƂqT!Y)!sckk?(iϷH_ )R#RJJSV"t- B\d P w_sj}\P0Gbof k4:8Mpai.sl@8d(.@33?jRlSX0"**fsc<pQRAAMTڒ$ R͂tHz(ۗX)<3!H JixP 9Ri9"rŇ.[?k`7-0s20ޮ^Y7)#rI4wVj,kԆ;އIٟ oJ'OB J)7Ў8 ףFvTO T9Ȅpפdy<طIr7i/M1n$Zq9/&FbfZhW /u4/C|\' ҥC]ےJ棄#J.:;[c?*Kͩ:F}wLњe񏏸W-$߭Tr݀zۻգӽ4,T̾}0hjse rp~6ܬFvA m838oˡ--!lm 斮5ö7#f֗ʛ G%GQ;hxyɪ`PVyE'HNc\HHnXh?NR/\}N&7T&J5|S\O˫aq09+"lytOVL2;ċ,C/,~c4_<ŃI_q[gxWo.{~p_zջן޽zڻ󿞿{+ yF/nƟBQq+7 ilo4PiJ|kvv]eJ Sj+Er[w%ؓI ON.FɃ"H)SsZuUʡ5q`)"-@s C#N jUixHCƭyZa`/80[q]]i%VGwI!vE&a+UԺZ5 ɵ"u\ІHEEdi}$с>ipTdv]rzkmYv_Aqy9n\VmiIFRuMkBt|}jPLPEV'0J@fVDN:;jY5tS:jZx /[GmJ;H*2'SnMˍ긇pB 3[HJE#BJ=FRT g.g'4*7YQ`hltBB2Qc.Yn 7[H i]PmGNuk1S8FHoQr{IΫ,s2aÕ:9uԣp(%,DV(BIϢ^X"#skݩ9M=r_dGpVGU{٠v͑./P{}9aXK[S h( EeRy `BHsR~OMAS`!'W0-d3:J<\10^ڨb1J&izlpؔ :g(@`T<jCSrL֬6bݤ?e~N?'cMD D80fs/5!AK=PX8 o"eX}Je6"i4+D< b\pJ'\[JтZ݂/e[TKpg)Aٵ׵:2où-~ZLUqMNˤ{n0WοlZ&h}[;ʻ=_ܲJnIz'KTNcp˖Y 0ͽ6zB(B*9Gr `uH\*$6tJ9F -6!I͌͌*qam+P.w\\D65-nٗqϋ}m}<Ngfl-DHШIA j"`ѻL9MȈR* 9]3cs0dcFsL\b$LP m;MB'Jصn4 k&fSXֆ; @I8˜HaQ3V0kREe5A{@ aRY@s5C hE{rMKǘ%"#jVw)Ҩ_Dfbl #ֶ>eDQ3#;F|P 3l"&hN'.謲F+Tp#(FVh4%D*DdIRQy5D%UbkTi0:O+R.YoS ad%?խю}n,sTeȗT,K/G+#jr7r 8 ].%C4}r(PK'wIW<}BjRrȧ#jVD>R5,\ȧzݝL\ȷw{gZwv3`^8զ}o< 889+5FAoo4Y-%rM0їaVn JنT|&ugau蝩$+eE.77'] Μf{>}oap}?3/a~nN__)(\UPD,1f9^Yvy'٭n$WPF$YrX~ph%h5M2J;K-9ƀQ"ʀU{ huCx?s<"]q}"[DWXOhWl<]eS+Tr"ʀ!I]tpmV6(5ttut%8RPaZCWB Y(%Nͧ+A{+ d@BҦ;E 6&$%A$n+MTRޛY0p#dd0n_Q#FiRDFU\<ٟL=5w-mI2b;l$X#b ![߯zđhj,6-{~> u *F]er>uUjU裺'*s@ fPUV=TUz(u%BoK]er9?u{2GuM+Il=uPԕ$*SQ]}$Aor3rIfKL, ~OOt/ݸatW"Aj~ߊ.}@h,Y ac*S -z?N 29yTg%WcZhx1ΧSdIֈ<睬j-_  V/=63#%3UQ}9.\w.򃗈2ttkË rhZ E'b}փotb!J͞LQ 7'ՐS] MR˙ҍzYOɷC[͓N7Lؖ PNXt~~;À!vScp~ƚ*dQI "t^,.ef\ʢKD,|:MPEQOS%0MR2Mzi0GIiQazg_–C]ss"l)dw\vS0SFOGlg-nFC;{!&AJi :,3u>٠pO#c77HoT;qڀbTf pC QG!"tJTģPC&SlbMl;/z@13CU.un:e07smmɸK-Y v>à3|ysB2ēNgmRỿ`ԭC_Ϳ0W 6@p8P఩Y1aaaNy 礵w ><ɮ2sz~_VfG:K&5VͻuGHPϪzDޅ (E^9=Lj0AI*rB(o 
%9>S\#ۀ^f,E\Flն6;зy6Ǣ}gqi8ᜌ>ml V'H )w{"PF!J n>B&S$9ěrT*g8P y$۲35rvw8}Y{s"n6llFiM=VEMϫ<9zI@wJW2PTTzIrZ%#c h̹J@&0BPN!Zat,KQ\8VgMOHI֙D U ĵ,soY_ilUW5{5?e'T ˪\kn-, <4EjhaeRq ܾޒ'BhfMQGq,KE 35"טP5fj}Ju<-kT%[wd:SIp?;-x( :3Pĉ pޒ)<^r=^b<^r<^r/<^{2Fs U*}`m?9&܃\h"\MH FIT(Z*Qϸ( d%/-˭s=qm^, +b`ZZrTT A'm(SXvє" (.h>}< . JN&۰aK+\LH Vcx0(on>rCMMMdA3xƓ(\TF|PA[RDQ"Բ 5m !G F~ŠC4@E$,0ˑJY5Cn: FaRn:هʵ_Ÿ7B0qbюr20vfM˾w0 ?` K:Osw}Δa2B4i>L:oOn:)ץP t:y^uѰj#{~&yQP@v ;wZ7_L/NoyQ@JzzXDL1v՛ ;1e2`Z].{痡龇ȕdKسO啺t?*WSiLyɄ.J0yˆ1Uaccs9N!0>[A֢yN=kJLx6<Ġ[m?)p{;zU$h_gts7XvmO! 5r5 jXs56VSgW(RGa^3pпZtdUg7qVFnuȦZʈzY${a!!bp\MP{Nw +WBXRߔrA7 C|||x1J_pvA4Iv؉W:G2TqSVTUSU57;7:G!_^xO/ggOg^:Q;{^ ¿L!CM$$v/j Q?uՆжUs#6[=mm6˅Xr 1 vM_4 ]7#vȭ6:,;}Q;IoN "yaZ&%KH2e"&%SbўrZQhr)JON3:6ɘDrAH!3Y4duDRe2HS uVu&0ɾ"|5X5= D|/ELdlj>;{kw\kt޵V<M4nzm{6Q`Ͽ+i߫br-9%uED6XDs!2΃20&p$r,@y,k׾AeRSXC=7YXzϱ?9?ȳցsnd^Ew^h6a ˾2)'0c&$XAp-*}=a^=+CNǜ-'0{xn7X,ẛڮOdJ$v-f|ROM2q-V}N:3rA^,\lXi!l{n/jq{n/Z6o.5g{l/ Q4d Oq$XFu UZO,1#BV-rMzͩFΘ-^ో0 c\qau. `so5pA­Rii !',u!XJM*jDJ9QZ9x3E@1& 3d#2I"r(!^QGJ81mqQG6w%[yix7Y˔ <u fPaՂqDRae5W2`v՟2t.iZgRoi3[t[gڍ'ژSÕ=]"ew9?#smA  )$Ť(is?m`E C!)EREah%xIp`k$r=eSS"<19w+5L򏮜ݪ*4,Gv(TS sύ*1oe|| [M5qql'Ҏ4aLԮ䴘dS걿П@%\;s4," fM;ktܹ-kJXRn>i$,D"Tx La#/uZ͑eHbFzp 8M(rnӪ#"( -J KH!uF! t~ L&of8SYV=BupT>ZiTL8?@T<~%"^`r "MdU/[U/^Ɣݨg<**FR BJ,T O=璷N^ړ?&$(LJpu u}Dn%m2tBLRwz&'@s1xuOa8;:]n{vhBZγ6ݝ'=pK} xVѱ7r8Xbv^`ϛƵ!:YyT}$X ˭͟6ǜe e`J|2Acf |L^VZLsS<딡hY5|O3ua(zOi㫇, ~=&uD-#ԾN SaT2O S2(P 9+̎k,61ξh[ŝ _=ypgܳnЪukysҗŰT# vn] -߫.*>+NnuMjDͯfe+g:&%v?d1FY:ϠcR9ɀl 5\g}z%QŒ.mְhT^K-QδRT[OWڪX Xl\2խ 0b$OE6HTIC7}-QA?m!5ǧMQnhԿG8 zHzÌ%RHg("ȍz~mz_RvT,?5~nO3~Mo2/Vu&'6ѥAZ3)TfD4:D0 WTZQh@0E B劍[`h Р X1c1`aP򀑕9DAPXbzS MB {N7<)8M]pp5|;)J+-A#OQkTJZˢ&yn)W2)@jGxTh,<(&zg8^z6BO< yZ`92Qq`T"J#BʠiR9"O]Y߯mqqR=7Z2T`"u!P.5C>hU "OK1B_ubԡ8$e.m=w'&}O\5\dH O(qr1u&iX,6$?: IaB$R`urD:Ho0&LFM5 _K~wN~VU8+22Ew,q}R"WN-CG翽|s^ȆKß̨{iFUeιƣ||u'D,z] Bay > ;i!rCoTtR 2C7'ߧn_O\";;˾j1cH#3gl9 # $L{nj6NHI/g97\zW%W?KߦTDh@UPK`>LzrKMT tg*# O> 3OoLߤTiK\ڞIĜ O?~κ'xdw-y0^&DS r=`z)#%9Ι>WԒ" gH,Rsc*߭8Z1 lPK`IlC4mg4{8(ӰUytZ٦j@ e 9%Sx6hbP)|{[gtr`o b&2WhZpTo܁tq*\ܰ& Gmv#]tmANJ6%7W;λ7Իb'"}hwW]=. COH#-#&ؔ2Ŏ** s]Z6' ϫivA >UH/Fff2*̄W12"$Q|Djā.'a:5Gmg- KuxHc[RFx&_x TZ}Ĉ2Td>eR$@|.9CEId5mZYK&a^!X3*1j 3ꄷۑٹt wMmM1a4#s&\"ceM8J{LybO W=QQ)p WOlAS.emi7{Md' .]D!/{xX$gF簼!O`[xٴ/^M煽 0Ȫׯ amorGT ÄU olr5p W͋|Us4JuNc0&S|DzC"X\ű Z&7$*kPo\%:r:3'QWJ"[ 8"D!̏ \%jm:\%*ik+ R` /4ML@&X9t8>)8AR0ky{8toz":+W.oICUx| :}] R]bEY?LaZ]nNP٬ m xi;:0l?䕔\y "2CUpQKG*iBv2ܿSrdIǽ}%ib%j,FiJr#1<'FFh!e~2,&ߞdߗб"h'}Eڮ ۚ%juocr.ַc1:&XȥGcZ&jkվB]J1`y0ƌIVECݟ[q@}|Gg{$Dĝ4W|1EuԦs|[XW;M\Zs|S 1:Ww5Y4JĔ͖>X!L{Frc'x5M 0Jize9aq[;327tÓ%*no=xS5yK̕^o="h0m2Xq>SV-6ٲAc|In 1,䰙׼)#;tVyyvYKw@ULA![ a**g(%\*zñ'6e";P epIpw3|wUe5jS8 ">OOկe:srE y4kTfYm8%^**c8ek\+.t|tq#ɿgćsٽŗ7|5CRdzoMY$E9&; Ȍ@!%$[raH6-@B9XM>½7$"̓nS$c#цsйGDJ7%WZH2x (Ih1@&ܘ0˥X[C 쐎I\8I'ݘ$R!'?4q̽f`!' ȫ :pZr6>4X!ܑфdGRt%(5Jk(2 LyC6llTB1;B>R;n4fƈaQ㚽ToMcGRHYB";F} %,o2'O/s%[J\M@NY^P%Sc)?C'!T{_7wC9*`o[PTB3fjfa$*1WpרcWÚHF7'r'4α'9fqu4~1b滌2{&PK!`/U -% Hjk|ȐT X[Y_Zh4 <}Z]dBT1l0uÅ*ܜ*f %:t,Y24,Z&3 |]ZPa:59_ D*Ur{Fare z3B٣Yԁ[ ip~l]GQ,'4@y!WP"LJGu0cHm̭a\xKD\ZlŽ- YZˮNJ CBpN`$)Yn(DۈE:&c@n7 (Ρ7\ߛEQL1 v`Pk:9:"SQrbM +hBGkO2XH`P\ZD2 W4-577"88h&ѭE$ؕ/P +Z0q2M!lV"J ! HX,@R iL~RTcRl@ v{N0 掉\00LΡ4L8r!% `A`ɥA@IVz bMp1-M=n7XB#&x'd)(`@Q< Ɂ]VKCV㺗.O6 ~t0Wz,o 3 $Ɨ g5P݅rve]Y 1 dc=WVcG=D &胤;ҠCth?bA^f\1OHj&c(QSNhD"0dT; `xaPpUz nB%!͈ U 3 73x(u@Ku'sU2G@}1;&Lu1 'z4;` KD^xϠ (z&Q*H&zZct^[1@$`w$ti?XGk\ /1 );n][q:p /,X]Xd:P?5FrNLgPle2 w2}?XwOü蓵,!Jq:'Xo.fh2Ǣ]R%5 KyH! 
5y.p JQ#(nD@ʀvo a--Rnk 92vq53cCZD W P(Vݽ`yd)f ce@R/DAf2xk.nc:KLLPZ|MP#;#b/f`;j$j @]x'T3=8#ۖ0ljYu\t k0h15gB9a[7#YO'#nnHB0#lS8p0)A/{I˩4ts \A7fh[&o $:].S:XBʸlk*҂gQO(ZRz ˬƾ Br?+EĭSN bn%r-"o Yp[ 0p\ k'- A:.X2'R∌ 9[.աVҘf`XKEw,&:IV|_@ꦀk5c $tiԚPAO }@E(Goep57B8vȃ%Vm5mu -gﻞjaVW;:q$ʗ;t3E-hw3>;ڙ NKxϧm_>0 WO}c!Nb8lQԇ0QOWY@_xgdJ^|y>~9WȾ2uˈRw/{Kh#(;nooOi\7G\.55.n,ha$m~> 6O~ Oy¥{'\;'8Uޱ<$yn4M\ 99ΖCÿ /UQKs"q8+?|3?t懟_nv)~}ڬnr׻X6٧E$G6)Jm?o!Ghsnɦ\愾l"-¹{@] GTo0츆)_k}_o#xEO??/V8~n駜p~xdDKm6Bjf!jf!jf!jf!jf!jf!jf!jf!jf!jf!jf!jf!jf!jf!jf!jf!jf!jf!jf!jf!jf!jf!jf!jf!j0]y\I[oNwoNhPe6fi )rJ֩ɵm~Bl]} frIK; IؼC+bmQ&R0yNqˡM__]W6iմ%WƠ[I`G,ﯼ<溄 A=겄XVݳJ=Ȗą`xrGQOv6A2/Glϕj~XRGf¹bLJj̎uAu\0G:7<:wؚ~ŧk(>},l'sh֒D?jn$ t'wF^HZIȂd̅dY8_:AwxdyN%E *Z~enն=HS\22M}uNǘ,ࣿ߯mIT"_K8fr ]n R6NfƙTy94b ל<} (63 OקsO 3K/wd/wTX߼=+u5wxxӿ nu'xjSۛRwWhUm49]*GW9Urt]*GW9Urt]*GW9Urt]*GW9Urt]*GW9Urt]*GW9UrG;n~IrTļ9#GwYƳ#JDrP,r9z;8i'y%@s|%8j-oͮajoD''?~h+U_= o׷t*r6iOBg>psӴi-SHQc)ILCsroszB?#=1Sǟz[lOu8=Yub>S_z3>vXwHKj:~_5WMjUӯ~_5WMjUӯ~_5WMjUӯ~_5WMjUӯ~_5WMjUӯ~_5WMjU ,w*{M){Mp- \w]\~8(B-h%Y8r,cAh}2Z|1oޟW?6d@?6=o1fYgoL]\ݵg6B Ev&EU3y#mx̼X٣䕜a2m)o^|5r-kӳ)|Gހ]5\d^'.ќwu{Kcs'_-[er[^T*l>?W#viKJ\jr}Kb+-'[,}x2u1;ϮRjf u>X( w L`!DtR]pNt`4r )" F8>!4'E:Lt2Сg陘XRtǛjQ:$}b>?Xj&4 (J<)\ [KYh\xS#i61B1) `PEVf+FR#$YnĸW bx`hFIEXڀP򀑕9 ',PXSs VCg٭J!iv@sK;<:Km.c?E]TS凖a2){&YDFJJ+WJkYL؞'2NuĚX `zӍMfW=ә_)rFTu/=KMThfTG,3yzW仑4CS:Z`ƕ¯B Khrv|Q~.j:,x,Pqӆ-= / <~â"bM^١鼈{NqjDNP3rs /g>bj "ǙRqu %PƑl&2Ŏ*(GR"*N,T|~E\t57P'g+0g/}$Nײ<G4ۛ?k\qXt_kY̙C:P瑤j2 = 63JfjOuVe|Z]@1(<ݔnKyƌƁZ0cĜED8]XѷBFQ! V^ߝly2Ȇl8rUy(K eea+߽h{(Ge)}S`f/>60Va[NRWް0v5p뮄>֟Gi3)=_Kƙp-Dz9V\ [_{BP"q׼w3_9Ӱ!VKƳN(aQ4gª\{Fu;gx5050z Pf `.]e8>xE1HԊP+~WJ̇[CBkIȶFhFss5;o* mޫم?_Xa.8r䅑"R !ƜAp@lr6&ԧUNcFWkmp3*h2:ڀmv 25QP*:d:5g ވ>yn-q˴Ov~ldMQJnW Y!;,+U &7ܙGe&$NF 7. o19EV2N7!ȄnB3! LHD"A (ԡNG 3)!): S{jE< XtK­!c0;cͦUg H׈hNPH>Κn rAx W~d5yX1vavcv`mU«Zy4z@$ @O&>$k}ExL^EDg=(5X'o#?^M+s|ǴV Dǵ,EUвTKEs)ͳj^kϿBf_ j|ū*>UMh mMu؞y1, u sEۯkrAc<`Y7 ߙ]Mi|PAv÷YZj/]-(inFW%gd$k|_` wo^⦫%6vd9q!M(m>}Ojry8=Ы}yQlBH#a=]D d, *fC(*"$3%S;5rE؄ AB0ݵc(N)A@5 ^jFulw֜=UlFvq%.z ZJ4W,܋]1xV1=kiA9t2MxJqsb|EA0.Q.@TxGDJNRRp2JaQiƝVch:&B } wX T2k.aPyO}ğ`ibrͬǶq+&#sjOol/ϳ4u0H! FIL F`ranc:&(n c(/6I6aqDSh0 a!47 o&!NQe#$4^C_;$W(x,hIa@dg5"eJa:4߃}.[:)' w 'kQg`G<~<*@m$G4pV`$/2"v("Gctt&E=xdpi7Gˣ0y6 lEm"A\:$U\r4 HN]4Qsbtwbp]ԏ/M66*iF FJ mQΨPsR`'T8/nAJa$~k9,!arR0>[Q;rST7NVƀȢ1cQaqv^,ˑ]q5wyvrnC!R(֚\e[5[Y!q;JGjB봒|:bsV:d[m}%Hhu6|97KE'jQ1>٨ޙTdK5Q-MU,㗥Dv3'oҎRԛx!eg~5Eխu0 ԟ.eb|{{WoNo.R7륌-߲ I< tv= ֭דn:DNbon'sbKs$ !Ii5# qXsBsWCr:f28GǁtɛDQM1F-= Gʣ"Dzu:9(dr14ykb+t@!.)Oc.*5XP2Vٞ\r =~'tЮ]mXl{2HK!kW3#5ƹ`1X>Х䠝mI 9Zr;Afx>u颬:êVUg8jJQ-Ki^R}2HR7>?Lvq-rFAg `hn)1H-h{((%"֞uOhA* (m&X > w!g4՘'pa,wvr!ٻ6rdW|]m,lf3X`g ;G1,y$9_lYnۭV$OS:|ttn*^|:'S 4;adQLX#N.o𮑐u6"3WAp}?";ҽϧsy S s|061pXK04Q%/CNiAZeˉǦ@E}KjJd.r>rESAcؙ9[+ }ҷtӓU٧hI>$[dY袓ȌBDmYz.vᬢֿCɌr<*[A?+^~J!2 1 VraWS)?xvfO DKq҄K_;';e.w2MsN^i۳0S{a-1G.Zˮi~}_;ww| (#62Pi+3MYǠ27iMZ/`Ec)uP9VRkkSXM.lE^#i>Hus72vU:zƮX(;BQQpeQ0 ˞R_q?5ٟu H%Jyb#ߒOLa DLHTI)s#"Kz ʗAd%ɷce1hlLjݙ9w#vҚweE`o H,Q_.,x .xL+OhE̐K@CXE 6j"X,cj2Ts7Nr#c_+"QUDx+3Kc) dSt40x5DqcDʡS"( 2Dg9cѨ.kE2rB5jnɓ'2B]Mtf,HMbpi^+.qQW\x[\<#E@!WDz9 ^Ƞ} 2]vv7<|[e[ڐtU2os6b Lяj{l$A,:/IZ&$JcjX)GpEkWpU{X{Xiko`˦F 5J~D(npXtf!zvMLrk`` LV5bgoo>g[Mxp1`Pۻ[+a^k!gYk\羽iLCݛސjpLmԏR^rD?*Fm^zQ֎En;Ԑ{P C*}P"ư|Xj R(kާ=*ՊY"EGp8հMq2( 6=L?oˁX\Z otw(HOlFk൑; U+7endd *8jtU6 jeXèʏ2//[vqg3n<2+7M1}hZ`MRѼ@ pa`boઘ+ܾUVNTzp%S \epU\2prJ˭#*^8\p"+ZDWJm/p&}VrN>8it5UQ<~sUdl_; 08j.rvmpi#&1ΰFkk:TOJ@d<REH6ܴtW_kqo^e39,CrtECeы@1! /+%E\. 
K./Tx/8]j?|_݁~l9M3eDo?]׽_lCݦW/?zkg,*V%٪$[dlUJUI*V%٪$[dBUJUI*V%٪$[dlU]dlUJlUJUIbUJUI*V%٪$[dU{*#2(k7"VˡEs7g}?QSECk/g2ͼ?4Fsh|nd:Ƃϙd1ڏL#wQ#ONln}jCN2=c&J1;G(/tVylAˢf!zw3P1#r@`][o uB ?HyHK]{静eP M{@]%')\nC"[)Ko9M:Gn2LVaV$ F4\Dh#GɅ,B9]|1h*:}_&-ja* "`d dc2['UtpNs4P UF0Pq,oax/9!`i±!Rt4BJq2$8FJ R۟jjf yh Sy"!AKDW~1GM3"mY3͙ぴ>jm8Eu>D.N KƵv" 4D7֒߼Jmk^bcus1+?YR+8i\܎A8?"x78gv3Nd׼X.gMe8Lψ uY}yu}ay]i[~fko^5O0n0]Qj7ezZpUd:;xo\?i~9OpŵV3V.pOܗ27kx xHqTfsnv~i]P~JooXpF mWΆӃMT+Y;h+6?i8.Ap *i .hq`~{ _eϯ^|v:9n VƜ'<,mO=_谌*e(m([V$P4rrv&L`y*[xNt3Y_b.;;\2m%ӻ캻Cn9Rzxi*t羉dzMƢxCҶ#9t=jBεTG,{cGZ}f_>wt}D.oھvOD.EYLdsȸ'"ysI3D2fS"J>ذw2VQWWc\K*b|p\4t+'`0mz- wN~6>ƟFMeۻ T^b{.ɟ.B.,쁬bs8t|dEVkmamɶ2gbZ{1[I6i^+6HוeYTҪJUwoϼl5ig2>|UO' hE!C>Hӡ2'2:5PG]oa6m5[Ϊ`Б;ts^tnտy&P=_2ސqq*96Firc=z*$uAKZt[}Qj"R.Q>DՋ|{d3K@or%pfcٙ9[.O IJ;GZ۫wQ$<1GW+yDY.gYiTAyZ0L.I402c,#RvÓpÔly![l^|' MA:p. pD@5ҡWV@F-xg(TDxD(}yэ+0T`x`BK"@gz_}ņn/2Iv7N0>dҲ$êͶ.YUjeIE>ɏ}&!n"a;^j7 2\Mz]s=4WTظEo7n<u2:oϋYڛ}LJzd:xK |=q34Id(W+qӞAyoBkqĪX#[(Çb[LsZ_Ң/0/- *FT"qJr<#c~ta#zb[mvvx@u:Aet=,q̾~OCʠ{ ٙ1x03o RMMxw Tu?ClOeL+6xZ . FT[PUj^ ʄ&oUc^f0ѳz7Z *;nìmdu|t;;"ޡ!r^p@ [ȸN8Nmt96>G<^3LԲ>kE,j:V4Os@bmJp\SSA4<$H#ЧԨ}s1Zob(6 ;%j? puyR2 ‰qּw:  HڠA ǓB8bΗC>3%X“ nN'V:X\.oټk p 7-uN,zQn4w!p:@/cwu`sKhRS dPfFX0g6g^gաq4|87|#hv{RFN ñļv.mPփ)x qX3`-N0!2jvФU9O²]묫NoE=-`386b93D& 𵦁:!"POIQ{\.;3P 80qY PQHQ9I ^.{mplڠCƊWNR ,2FOHEX\^>v|JfӘw %'B02t&N*#XCG (u5FY#KǼ6t: 蠵^y R0}did:2ɢN ,sf6M$&l0V4%$^`-=Kul^_i`cֹVV9&A8j$WӔH,hĿZĿ @Gd --j 'ZZ`Y^5ߢِ{3;g mI"8[wfj雇_ *)7T#"ڙt/]S"喤Ȭ $b>ɜB,XKN imP$(UY@'ѼaLE)vAbX܃ȩ#N~ ${_lz xq9wqdۃOwE/ű.$)C]: Yd_ ?Mpk>  #q@\34 %NY N:S5qkhsaORth+1 5e*'p 5"23;n2jnMQ2&gƀ.J&q"Os)59 T_QmpKhY w, g({q{=j-va] NļEY]3`Ӟ`ݖ-MQ eu}Y]XD :a) 6EZ[`aü*;mӆ;mprL*X.x(G49ABdYe;vD1# B(Q6b1J(0N # `T",bY#TmU-*@ͧ_&6g ɗ!@Em [q D+#ZufZfT]‡W=̋:O>ҽ |ѡzM9x7>rw\̥QYƨB4;l׫xռ*\GEk%wxQ*W.y\IOT\9! 9ZwX.Znsh3l'}N|5uͭJV"ȀR &FGτV()5'[C"s&aNDO,hn0KG1ABm\Zbo8Ϋ@}2'd t6 >}m]_ͻчhdv#pLl_6~ssz7zn/7ȖCzm;*{걭bwޅD=՚Ym(LvW]]!圗Z:!.F&Pr{+HYt* PQ.Q&a˭9dTgDd7"2, 1oM4}a8uķnܻSG-8} mV,sP w`&/Iተ "AZ%cR)>riedõu{CDrY#!DcIΣgu4[mMr8t9M(I<5OY_b}:Ei0]4 iq-k, */㿷f&3ESf *K W!yTH R4Bi e zZ:@}d vfaJ*a^isQSD$KM Z{-@# "D>P\Nmi ruAUJ#(ATZB$S&r& p=w+!ڗ[%/):X[`VBL'^H^Ն+֢X֚.m  ]W:1m]e2\BWV`t()ꫡ+v`׳PLD-A&ަ2S eT_& Il!H!E_?1{x^ 4A|Dx> C^QC6կOoTZvAaP\ ̎^* ގ}_-\)E,ţxO=x ~|p|j 4BOI2@o{>UAUIJ)LI8xR'x eߜ> A WrQ9e}(bmhX'7ٱU|Gx@^*q(=6X2\NBWOW]BXJZDWn]\Jp[*tQ*+;GGVkM W>|(i Fp[DWX]eZ$tQvtzJhN3ujhi:]eV+MEt]eڴ&3dxte(´` Wf*5lK:e+~`+L9&j?2x\%.KWǡ5r/9 %! s/Gjmoy% RS" ]rDf% 07l>+O4n !{0}vIB:!.aP PDc~|֘3Ϡ'_%w%) dkMr+RӮr $W&as[#!!)xʹzV*9ҳY!iN_qF)r*qk W(rMW%!S^"Gn׾S,yk*դ-th M^!]1F-+$0n]H\-k69Ttt 3)o]LWch/eyʦxvt"t%Y?j>]e={Wm+@i*d]FJF[DWXHWht\X;z=t$Mtk3e?(Ew2JEt oE(97it(]BɎx0h,++4m ]Zo`ĆWGWm+q`׋+L [ۻj@~{X>W1v#-0"q`).}p\y(ǡ5 |J4XqMOzEn`:)qIѥjx:#b-c"V:i'!Ke@}Ghy"aux&C;Hq{$JI,6أ3@knrsdR\l-z=KP?;TT|iAdt}䰫|'VT|z9<(yj?ۯjwpu`φ9 a)n>>C]B,QJ :0!}1%zrhN:蛢W_i]Ӫ *+(]=jjs"ߩP־ʚ'w'W܊>}_VF̂y}v|`1JF 8ٛxY ^f/KF  AR8QȤhb=%A읶FΞsMO)As#)"~&`voy.j#n>Zu|cR!X 'P4Mh'? 4]Y "* C& Ar!hSB."J9(M\ c:dT9j ) 1ST+:o~ڄaE&Acp1T^ 5uif4QVPfU&Rh66OY1/qa̺zG65loiCnnīw'!G7[J|P"]KmU͑r#RGcc&!!RpԩC G'f]HnAz\!Zw6*e_a@Tҗ1h?2Ai 4Ƙ۬HɃOQ@Tyx 3L1b|16)(a!| XڽYN8/ڟVeNZ[1/VԎZB+#᠒3T ^EApP80 ( sih5wS G F|BH0.#%Nmt5BDBp4zhbo;% NHwΛCyyJoJ`HCn{oOӛ)omV:ɓ)q%*j k™Li-/BRR;֮6:MqxRObi(6Tāp<$8R(^IIu2B_ }עAf?p3QrFvC4l_jCg=oyg_:lDguJL%XyEQs "GD!缣!6O6?{n~&d =P{`.~\$] C&?´s3Qv&q}!%[zph(1Ǖ?-|tQћLĊMj6u gĩNT3&7d/e}ᇁ.ТN'〄'\畔VU,Ej|'yOr|3IiGn㽛1}C?ϣ]bw?^/{ykBGϱ8wWW].?j!/<4`{p8(H/pzUOÓ\匝?m/_KO9wo&C3b5fJpޫ]F~e ڡ[ 1h{q*W < K&79*x= UbaEcpYΤ{pSq7׋ i^E[|~IO[(/=6\>?] 
Un\9LxFͣʶyb,xuf {ɉ^_2E):"tm,QȦtO3@Y7ۖރ(YLR({2+,9ci{}Lt#8)k_Ip&韐⿦tB8^*&ԿwU΄bydIf NWjodlxSkxnxdeIIUóGMت3l\`)CY'ټnf#h#Ev|E'pr<NhkI'DJu§.ք+M+2ԗQҲZ _9@ 0.Cb^\K l+t2T` " 2aYʊSz(́>}(89 Ɓln9 c~7\;u<~Jj& !l97"zoK3q J X(]dD 1e>N-h>4wGy S yjԛ=x(7?.]tu["4A*ʭ'lTz)7`s\(D4)B$L˔!Ya7Jp *86x&y|t@FȽ7v 83p\c U3+8?jmY9rB9n>ENsl|229%NjZ:ml Bl)}zjS:m|/^/ҝ|V^Fs}^QlyG V7sN&rS0}Gd*% g>ԖŐKb # iR}h@HE*'p"1ɜh:mеnjuBk|+ 5sH!!ȝ "D锨#,JrOϏ47eܯEz[ j9<몝:{IvZCvǽ9~}.m 9}݇y%MS_Ɨ!'9iioO&ƒޏY?ϲ[@f_ܻN*AiH Vyöw!PRulzY&| >(GRaP6X!xx'zvqiȢq{FO6x+cLR؇ݎX%UP|gb{vlycF~\ĥ\!SS K*$AVIs&"j1B<`ioL8*ZJ@sE"h6H]B:L@$*(ThY[#i g}rXL {oVvwmqE,_ YZmYVa9U O;-@HJN >oa=L24 EHjl@FGѻ Tz)|\.XʧYH 爉J/)s9sknVWa[{F4T9U/'IMpYerD9鷛DCȁ9~H,ct_2 s?O;. fO%Mڷg6~4c0ci3X&Y7bF{0VZC^V'%M?6m[/5ZK R⚜C@SC/=ٰN@(f5M:mUM:ceW&lpgv2#ֺVႩ&hɀ";!$DʂSJZ 6&mPd(E*%; d 1 {ZbsQ;zlz2{B-Mdl.:ڎOVrzГ]~JeSs*oqG_zlCܻbL/KnU"AFSbDpm2pKѭ4>yݶ,ծU*xS !i)s*ufmα6'lQMJkJrZ@~:)I,&B p`?4q]|@[eyU?Y K\A {m'ఌP;ZOy$fc9]? ,?[(hlG΋)J ExKay\.{yz7N8*yi}:\Lyf)Q|Jd]ޜEwq?5-jiyIKt6̜((mîʯ]^!ݝ)57~kۓ9F?T\|>__ÛursQ݊8gOV' 6]zyL!6܉N`MQoc۬ ^u(xoAywU黮鮛ڪ3 >J|i.tN8Oi7w~xQ{ow.+uI|Jg5~*/FַF!5oukŭ}}Mp߿y_ pc#ɂ J֗b$~#J,Y 3Z,,L O$\#m{8gϟC :r2 oMn!u `E'(ԙJZm]&3F6-*lsuzYT)Ď՝Ѽkjܺs;J|Φ]J\7dǣ[hcƇ6\;'I7U9jߪ>QM![M1$ f! a R6{#C)Dc~lv_Aʚzqq51k/{YlGK@5ː-eh 4W..!:ǦUE5En3$DL "\CCFj45_9n;jQ^Iu(*a* O"ZrÓZhnxmWsMnxzjw7k՟>Ͷ4 Uhߊ7y^=Rn6&}@՞d,>h0ŨrS9, ^ӻ#$WL!fm[$]P۔3 PysZF+k Q(9i!]ϫ79oI &^]됩uǽ6^jLwt.?}9łK!D2*GGS8Tq|V%Y+PVɻt6Kf߲5͞<>׿^*wϊyľ1 fQuhq[Ę5UKZ'[o@erqBuR%#p2fl#$Oc,m)s3'w+4{Y], {{5])kZ#Jcs[ŜB^Ij޵RY6+Ee{6{'=(vzrI"CCNRANP Z"kCC0i4Y%MgB]ҲS1-j\J24IF#\'jorj^pribPΐ6Nv*qƶl@ْ& 7{<Uzcao#cWB,# _mן2\|8;>~v3Kb_?pqqs6(NdY'Mja0%m9&B@gbk-c`g/d%tJ+ƦtS>|;%݅"9ؽyr\̡P1jHт}rE2=1ca. d`kS3*.ČI+p+[c]H Mh2@^tkrɪ0Y2 u!fR&9$Rys~` {LD3q$6@H!eoHA0Ii \H"MAt2x(_ ѣ:Dl1o:;P-s&-V:]0%i$$Sqzn:grr )(ٕg.ڑ#s@ &hWH]*2bPm"F[eCv#_CaocWbv*G\! mI&Vޤ+^(^-ַmjg=8Ъz^7u='M:Pਜ਼JPlOΎܮ]/h+ #u\Z?x\JT2xp W$W,:5+V̈#-V+ ˵\C-uRi7j/~{݃R\aWo]J)G\#pvr; V++PWVU:9qRUk*U X- >*h]#PZa]E" \*+Vň#ĕWUH \]uj"ZW_ ܖ]Nx2Çڻ NI&M-Ⱥl+7jR?3ph|tC-훯slY~uc:O&B/:LڷZvtNv8tMݝ!BrUƆ4Z׈"ph I%+H'6n`Vb7Z jvtbnPZz#*e_&k^Ծhx9XWG+½Jk#W,|-bnr (I0W,VDfC"fhk^#+Bx[,T5˭ƺb÷Xw#WAW$ؚzbW,UbJIiW{UM"JIW,׈Zpj:X7#W^B$W,P;u*qD W$؋z+qYFV#\]'B+rDSR+жivvulcC3`J98bzzRJEڵ^{yu5l-dG10֣MNki\NhX^5o #s^Tѐ`<nr-ѰZѐJ/G.Swkt-bnb~1J[W$M=bL-bÏJ/q SV+~>\ju8t\Ѻ:J\',Դς=V+䲛Z+Vr⊌+/"\`zWd90q圱U9,׃+D5bJWҌc:&\} W,qY|Uh]#*aW{t"\`E5bպY%WG+J62`ж\CP:1~(e* UĮXu*툫WRlRB<5JGr2MIQZub{\-;j֓RO;ix!sc=.Idž5B&yetj3VuvG^6~RanYpah7\aK^`Y$M,i #K'n$T2,U]}H^{7M b\{MSwax(N89M~KZ ]m:ENWc%ҕ!;జur%BWn'#ے>ꏡ+&o,:`UkR誣u骣?ҕՊ v1~)t7e0^ ]9m -:?6<:Z:JkW/DWgXUkd)tڣWW^"]y-> YFseuG%a_"],ibՋhw.x>]u]?}/RoOΚ});ŷ&㾞ޏ{̈́|'0Sw}mIPyFVݽN6TTT0/@$_P1O薉yxBxZOoρϙ)[Ogh]z?nw]AR?VZv45seUrޔhD.vFJ86#L0O>/74aowh۳k$WW|ͷZ]-\NHY@͕շ$͢Rp\Eq\5L6hbN'}/IN11jT I]H1bQmj+1s1Xb~bOu6YmHK۝ f\.6U*`uPD,ogM%DkhsdtDҞݜhX+"T{哧Pk(T2#s%f L+[`$Zt7!7/ b5ksI8iV)ᢛL$()[ı*BDh{,,&3i lv}cR֭Eę>bTN3 <ф`z^.2d*6!i(݁6l# ʰ6&]JGM)5 Fa?@}YeM,yi94,mv6+ V+ D,π@+Ј *I}?\dL:[H@!!X2hOCvx >,:ϛϪLjjt9J9UJrVs (E({πPc[Mɇ9m\ DHHHI?7ҲSh`K" ֪KF }2VWAHT"Ux,92lr6U)Y\U6QH fn$C)$XgIQH!Q!c%X&"D y;a*VQ/ E#*)޲ ,KBnQx *Uh SZø3uZqx9(#ȩ X.һA2hA'C1cm̭Lp9M=]qkZ6Xa 3=WW˦ pb'*+HJƬC6,#1 TΈ쳷Rw@AqZ*c:)8/H1xlC?l1TFbĊ3̆v6"I6pH0'X{T;0W}rRYdc̡]a:_{0bqvV^*~8[ܩf\ɘbM&h ڦLkT^%o/td髒ћI rth@htW sc ,wHv9AZYB'ąs {PK$a2 #Y }\+g0 c4/ # It_S4Cݚ;7C6dm(/NfUSY_ ygUe-m,93|Y͌ QHC~uy~o%}5xC=Kp0/ R mti`s^yחVE<_m.^]^L5@ŀ ԭmVlfѣ2A5GowO(8wT9YY:Zm+5fݯFHa#!e(h <ӣafE I y XkƖRA6v#`s -fI,c&#SA4S!e i yPAz{Nge V(ۅ+IJ1҈ᓫn1混H.zɈaUžcW((c4*[nFB++zus[sO6Kt.q z19ت8c2fnR<H 5kփ*Ti J>{ҵ_3B5d(9sOkt:CAVA%/b-Kp'^q* NRv̦Zظ -+ׅiq#&0Mz|dNYFi<8 N²oCIKjm…PaXﭛ5fc)pUjY.CC,2cR5PVqI8M]Xf+9b4F2켭G~u B`x'`wJ{&􏖛^}#.嗽r 
xG!TyJJ+F̯Z*7Wb\3MXrnʧ/wT(o~ {q]l%veANOn6ix~wtw?lnYW1Os{ӓ^Lק_W _t _so+gRL뻧T绕C'XNᄡٺ ›ж|O mܴb}-=Aʯ4ɩS|FUE&-]N%rg1@68J#%l:A3)ŷHgCM^]}l/sj}v cz}y^oۄ)|j +osːgn<(`ճULi#qXWnˮUD)D+bGt^7tRh:]EDWCW\#1u{$X%:AH{DWX] \*%tQR Bd]E?tp) ]Eݧ@S+R}ZUy_*2B(YW]!dDȱwr.=~hPv]=%zpSZKA]58<ôfU cM7K(P?d)[pC󾬦]K쥷r=T'}p|LZ\&Qx\s5 i52A^:W`,Xu}( [44Ҡ=Z%T&u@9ș"ƅ Mp0&ӲTj'iMLjd$Ƕ(@<E.ג5a?IKB]2)np1 9H/44z|#=p4xo<Wxr]D:yr'$Hv 0E"ZJNW%ÉN(RU|IWUD+;NQ*E0C7tbh):]E':A"{DWW"c)hB)Jtut%0\D7tbh:]E<)ҕ>90u hJh+} I ]E7O#Z' kR)ҕ&>? .uru(^G߱"/c rrrukqW|≮Tp>)Ff3FC1Ftob E[;yQtRSD'~R"&TC@Mԝo>vq-.;4ü[$Kimlfr}Xsf&=̇?zrm \QN|6 }y?9sc~+~@&E?Tq&YM~kKlW.:]KЌnS:M'wؚ pb/K|g|fR>Ner>\9խՄ]ܯ]‡:f}rWg\uř_|ν~ jp~KM4 WL*=/sK_$ԪLQh9Ze5MN0S;}^q~P=0!\ r 7j 2H"  R6CrLGElE Ҵ0t~WN =AhMcf Z8L;]߶t^04ޞeͯO>v1U>,}YMn>95L[07 q];+r3 s9Uv>ۧ8X|3ba^Ya0gnHծx5MKk:Zlqh]c'틕-q,%x-PYcX3E/.CVliՙi+bk&b6.gy?hs?[ga4FvnPav'}(&[?p?Yv .)Y}/1g!ӯXֻabjP&fcD/~_Ncx6ߓqX>kcq_\t{kl!M~3ͳWWgv.zsKq|s ir(߼.[#n#vUx`+ϥ(gԩ \'g?N;yꭵ]Dq8e)5a?1]{V}f{SEa1ϻBЧ:8a}0VGrslƠE|f3(8T;" e2A)QeŪpꤦ0⍢+EʘW&gǕIal>T>דR(Ö( M)ln>0<|/;>rx?`2V3)# ,ZKo+* hO/78C"#^%@(lE 0"{ վ(0!2v@Ā%.sLvqzII I+0pgYJaC P1\sy)/6,y{Jp>fwwn5w@7#˕ P=3OAk,$r &TY,hbJZo5M m  Ä%9 T٥KԳz|kÝ_A+Qb-RzM=C@HKemb0O4׍~P)?w;n0 kɴV3T`"S.Pa(I](Ba e0$T47 ֛ņ1q!C(:g7, CxMY&[w)Q~ qZ+00So6P(lEi~b'W,xp60,mth+.bF~82?EPMwcl&ݳjðt_$k>B5 ͵|ɟ#ݗ,[^ͧͅ1ׁm@ 'ay1X3¡o,8~&0в*գי,I5l5wO" hQSi8w='(4x])$^ i"*9BԖo寧Yde}I]/hn)9 ͍RPTF VΆA|gÝ^UW1bgj7%- B )U`f@kE}W/?3,XT툱E烙N;/c Աj@3|@5ϭ+^Xw|2\CL Een>td 1uOx zݟ _ԮKL@B_6\?22ձqY5y3-Η+&Y3eY-ŰK1Fl4<;hVA"妋n(=2w -ػvAb#\S.\U|ܤ|a6G-Aw@wNLG0M -7*)$bԄ;Ψ(w~|7;w=BL3660ra8L}=zXho^!p9}yXDKY_!Qr^!vm@P+.s"|Ӊ|їwYkji \Upʷ)%ovg7aټHv6S Rc8}րQE #qYDE}(D][s+󒓇qiܶ*gs6Uٗ-nH)JKR뵫Ocx%MAWY8䠁n|K+㜌DՉ/C=!Lp P9{:;b"z 6X)9o.}iVxuy"`s>B1PqK(sT+$aBd xRH)049=lQ :b^k/RmG}6a*uf|O5a9&ad)rl)rbe /g^tQlYay a=+80+ `S=xӖM( 3 k`5 T(I\kӾc.Ԅ|f r``m*wl>j{f8Gxsp > 3電ot'ȵ_n9Q./ Q%V 9-ur_pJbr*€7g_~oEfq@G/`^YB<=ͧ ]'\8+9c:SzQVӂ7^uzn[Ȼmu2GE_lWv41 AxtJjk[:;AGEҶ{U@}&$49d)"S͋!!D02Zbե1DlF>{|L90d8Zv߼錵LtxvWp =Fܻ `v xۧ_̺ͫ8ui9k]љ9Ny;"u)&^?"-z Ogrm,.3k G)!LNPJi~B!R!X9=h=F׸;6F*) E1&UA( mbLV"$gTTJ+gХufhw_@AztF7ű7J'/.g4suXy{߽染-Sn?,]cBS{3o IM/ onGeljx~&Km6$#Fg֨" s5mJ-i4% !Z?z &GCI$Sa:|RI1! 
K7#a g}ryjAF*a) @Mke,)<&YTB3dr>3[=@p2:[/RP ԆY0k4&lrl".(B&SFWHQe  ShAdX8qk"aAiV4Mt>}F9ފOþJZa?8d{2~c t_`z.<9xkܩZko}~ThvDIhd[w9YtioVSu)[lxz$j=vZ&a I ˡ#L-jflUfq.ƺPIuᚢ:uE[NiS>2/_|k|fmsr ϐJћ`00l Rj$Xck 2Tc/:%UU,S]aNPnbK"v"a.'abnǢa'kHRH\XhM|LQ䢗} \Z>,ƺX{1%P[Yc]$U| YG1!S&2LZf<{~7QhfqF454wb0,0kaHX(Ew p &P!/%Zb@g,Y4ْfIȵ8`#Q'֋n)\l%EX/A/zqg:&fp>1 I\&|TRSEوwnЋOE6Coa T:*lK˖9]{Wq }a /mdHl!lhǻ\Bd 5mFΞQkmzDK6o*%-W~Qb[!}[)g_Xn@_󦳾>fɀPJQ ItaC"P؂vE) R^z_bR< soty|=quwUƓbVΒrhyV M&d%ET `lأ Kg &q x<-a݇AR 6emBII.Y, 4٤TrJ/w6^=y<}i?TƫYM`nhNɩ}?8ẺzbSU2}wz֌->^kgog7nωa3׫51I-VVp0M~ ߞzi4e$V([GsHۆQۇ1r0#!QuN~9\-f*{$mJEV?i%!u`i* ~#Or᜿nDZ׿};re:y}}I]é:Gʈrzȓ?U옪eNbQ>:c!Owow߾~ݷڻ~\km$lCIǣЈ_NgC jho5Rr7qzw\ p*38 l;+B}9Fzv;I=wn'Q"H<LU$G!GbNK؞Y"z1۞R.]/= 8P |_t52cZta|&/6+PùNsͯ%O$l &aq;Τk,=jy'dxɪUdP}MW7[Z8+hP6&LH&*lLvlLRHfc:lL BRJu:.%4B5-] ]I$ ֚%CW֎i8]I4M+@kJqz+lqMt(iJ <8!d d2#ZLRsWHWF(jHBt •6BS퀒[:Ǟ8(]74+˒+D+y QzJmK2 ZE=K6P4|~4I/{Q ܨ6',:Ȟ=³gx4=59wdNUFY] ƾz{zKWD鱭#;ۡ584J4RmAӪ靻^nVW`vb }1-,7KV1ufAw~awyy2(7LsZUDex-<,|_^oZvΥ+} GL%O$Γ ,=XIpr# !b)^"kjK|!;R|e `9*b.JbB6Lb3V`_kYv~tJ(!D)rI hfʠ]~NV_93:F"[JEu2w {<8犢,ӲGPչ& RגE:X.8R1`prNVɫo:7ww\P1nU玼?\{4bG?S(b>X\WbP[Ɔ\[Mraa86By%MgӝO:qO/T2IiB4,gJ3hj3(itA6 j?e卧+D)dKW'HW\jYBtNU4%oJHcL ]!\jS+D+X Qb [:=If:+a&hM+@)j ~)]!ࣟ\3hi:]JY;}ӡ+47) `M2tpNnu(mKWHWFNSr52g[Wtute( I "\aR+Dx QjWCWzî] 5l)zvjEڽ[[S7{mWep"gw@^ :XTO(&ڦ \9sA uN(fqwvH<~톭p\a;HEۡ ;_Uoa7nص1_R G?p;TBW䪥+f $!B&CWWT *tBZtut &DW`d tA@+n+Jtut%45 K5~W..FCF  @ʫhܭ_uYMiy}˰U q;*&Z=d.U MV{&BvRz|8kj>@] 0hX/ 4SY7:sexdSA y$g:TIQϙUr QՊߪ*f^ dPK:oa@Uw.ULKť;s:GC3&v3iZur88n7햲Jyx}hͺ=f?IXCOXC*X;AcMkNp+0Og`>?]!JKZ:AR3n+̩Imu(mJ[%4ξtu: & .ۆduutea%DWK ]!\R+@+HADڹ̆]oPNƵvpՑPjn+]ύl\"GN2B{]*ڨ{붨2"MLUe8ɤjhNd6˴idUZ۳ %|ۮu)|D0"r:'s!yR [5bt>2b5>@jYsv!Տ/m}'Ϋ]-vZ&;XpA/$bXH7.@H51*ϽZ~O_~}݈Wu/@6xyX8].a;uu Dn]П f- ^,Z>nm2Z[Pjꊟ/Fa];j&I=ѕ:ϲNGեDžكk{t=kVꉕ CRBr(z͛*%ޫ\} 0A + yϣ39+yUr.@*G-GCUO૑xkeOf_C( u7W_x1"̿xT#ë1t[# +^ݲUXۺ:fcbO6*+F}˒q?δsK 2hgsc2V 7" 5;+-q/~>c͊0I}>$/}rT0cx`(nJQȩ)|p{m`*RfsgxU\$&Zm e>pjQpe7NjnMA(&3C2g'V^ `._r**9kzn] e9|6Ҝcֲj7w9~HyFuw6/TrTiI'gSo%,Z|M~O_c:8 s)P)Qy !:b],aO…֥d eT{0)#ZaCs&A8S1 *E%g Jv`%>/+\a|n}ʌQ)'>[ǵV0-ئ7OV|>Lx|Z֥t)S]J=U˾<X*+mdכ2Z:<aC7Mdzd|/{3 pc(GYU"^'r?]BW(Χ .eK_ڳZpWNR)`A0KP^I۫_ʠCgal9LkUMf_w\#33=F{*m?Zd ~of14((ˠ|`j hxq``p6t1l t+;y9ODyg)7&|۹`k[ˢ7qXt.BzoUцR"mI@C,T‚TAda -v!H ^:B`x^,w)UHi7E4 r08*H9n@jIbLHR RDuYI Zs C}RJ)o1SǚNjxX^C?cəӚyp'gUlxȪEnKHb <"PFZqbF8kvM٩):c',=0iˈv(40iG0KSVBX-aF8+jr))\[pqcFSP'{'Lݳ_d`. f2s0,˽ M{o^_ Gc2Ae*v 2_&jzpg<^ yeabX_ nZri6N~x8 :L/|<}Ij}B(Fg3?B>K=٫t寿NM5IMnf쏫0~Hd"}E7¸,Z ?EX$zFz5I; OQ_~(Ozߟߕ~?-9C!|Ӫ*Y ʺT[Y={z4sZw_n{[(}@pOCf<0"pwwK.R¸_:˔~?R83~8ym8=!{M^_>Pc3J57#2ޫ_)r ')ML^Z5]v p[/l4X LR3 )iXbxv wmA)7IT1NhţNf%C@" Ggײ }b )r)QƁ2,`SS\;IодbhGL,5k.ӽ!< ?`eH{A`'`}_QG?x<n^t\ Xޏ\[|e5ЩD:G۵ߵVR&@DD&pAͣjnDh:vF6zIrw 1;(0iTgRa8AP)Y #y2dJ)D-^J6Q)UWJ,%+ 3|R4 @qj%7ih 5vGT U=Q偞~lO[Ɯ$AqSimC<!)Ffl9mٷ[!:z)mxSpN s}NS{&MMR,\}W1W2:4c`bҹVpb#Vա/@C`M5}W_u1P4$?7Y1ZڛI6IOX2: cm@fz=.+*#R"%@T RaP ,%1q0AP8@2=n({,?*!P*8=? ^SUC0nX=<0ǭflpN,r ti_˔‚ M FILFƀ01y# - $0#HW BH0"ht^icfG4a di`x}zE 7yC;?)L31ͪpfo_$J\iV!qY3֟T-"5J0E>L=fOJK%y< OVQɣ@ry(h,?ΰu eC9K)J{LܝM o@ٿJ`My1Q& -,I֓^^i-.{ibZ|uRET&+‡Fu~)EIJ/fba?bT4:s4g??#H)nᬸ#${ (U&e+3[gUd^?K3Փ/oWCb@UdN̍/yU,Ջ]2F+ZO+S̗v @4n!p8VyRbXʨaU(bp7[bMiryT6:dӨMs%Hh9u,a!i`إSa̒%lXu*BL0TПn'BTVæ}.TeO2WwPVT avjz o~}?~~7?^oW?^(&$E,I`mm ͆04mκ^]mNaܟK[ewKAZ*5MSĬbGIz߸lg!.y̑#$GJ{iЧÚT+۞0.y=jf::r~SCFqM1F-= Gʣ"DzuZ9R=K9 n)%;G﵃7䵳Vu;C0K沀%E ¾ פ ?j:Rjsfȵ1Os H˜Ͽ5iOTN![M1F: lؐ N@SJx-UVNmi99_R,+Yz&E)Vm\0s,G^r6$Qs-9˝R 3]_gm>]D:Xj̷&j8J;HLIA K9_i~/FZ䌂*gRbr[wPK|D>qʼnƙ! 
G$F9u9QFLG[QA HBwDX!*at [ `-VіxsVCap3gY`9!B2)UCXCAmANqq\h%DcU!$74[BŲt%J:o"%Shk#+y_814Q?m\O̓g󏗥Ϟ9zoEZ5!\(.14:¬J`ER%@g%Jge>O񽿞~x+4cH8;YvLrRېYx?p|IqKϏ*peHM:q=xgft9vzM?2E[>ꞃ[ۛ[;/g9:w}5헛?nno{{v϶|קAʸ7tޞR)Ɨi1nO.ysydSTq 85ÝOp:]WH1;?3HW_KYI6db̘.^V?_-.//zy薗I;3z :>[s?9zW$_s[5{hxf˕rfn8A?|V.\QO./Oj{RsY'^2!*hy>lDr:~,w:~~:OzCwKOT룒GSIIٯB\O~ü{qx>:pT𼜅 'þkZ8S{%'/>f{zUv'}rm]~(+wrwO'eMCRr"!)BbRVo 7N\>3g}>'գMuw ׋^m?*<~KۻYy}r88lY: qg(%y0} NDX8jI eT&F_cL>{ W=.>Zd,[dZ Xm)2g(+&qc^$E r'$h0aVmX¼ ǃyK oEF;_i;v wf(O+!TYB0=X 8JQFRh_J!$/!5|,If0_T鹉L5S&ԅR;(Ij h1}. m`HͰs\ͬrm!/6P\$T<%`<6)63c)Pt,>,N0d>ۏ_FQ9<*ۓ?Nnȇ4^˘ xpnJF \HsM+012k "(\Bq#(t)q1֕:p}67M{|AC43rɳeINr+OE*j5WN(Wɭu+y](W/]wFJ=nj6+`<[<]֭lG.kk,,q.,cTrE [Xׇ: 3w[9\o+h:|yCZB0iRHZn FߦA} u5ZI|p;WkZMטp`-6DUBʹ!6tY>@BZ "g%̚zqNBS ϶;l.P0lEE(!Dp,כJHQU^zi)#0P)GٵQV>}7Qڽ2w'4 i494N*_gjOɤ};7<%cy^OAuݹsa~wfjԝ TX01(yb)w\I 'ٱóH4Efc* JSQ88H PC?Z)v?ڕn3txEo90_Mzg}(מG좷sy d`v5wr޻n0XR3s>|i~_[@<__aZi1OBacK4VGx9~"7_Q\5/^ma=DWPtE^5`88 oƍꫮ㫇GI}/aB}߿Jv {.7O䏃HO1^Uwvi輷w1=j'?| [?=ɓt>C_z\C .ί{A1%wtQ[H!r1L{dVib>mQ?nm__%מ?ƓT}O{*.=Tյ ٟ~{Z>ih!zﻹ/.O@>w. `~_</rkM*kYgkQ}`ϟN_ cO}#F}2''W8p2^=X? gX-ӯPm7q + !fwtN{ >PIJ_Gjk<DD*>I$>Kzx_˘C_fTҡ;'Jr+&_MJxG6*{;0Atvl[X+ԻΚ[}nkTuYPZoFG7? 2\RnLƖZ*AqPo9u\Vz('aNs!sK ooj^M}mg5VnPpjv8=]1;F%:WpϤMQ"$  `ǕtZ ƙԹ,`M2vLulrYhf=%N&8q^UYQmWM~3[udU8zbGb5\yuP>^%c]=bֻG7gG|-{7Zq˹%;Ģ $I?6m8̌qf{k;8=XDWx$&{ Zbw*֍pQkiǤdD*.TirEwCi}Ix!"!Ic"ֹȻlNPV87'vg[ʺ+s\)cڙ#YieXR9F'Nji)ی[.\*f QfzǬ8P׊6Oi|kU(7ξ0Ϙ\!m[;>+/m1eO,'Hd^f0=SNͣSÎknLtJᙨ'.:] 40$9yd7甹⪷{Il䆄f |n1$ lWPVpHL#am0i! y& Ji`3hX19nݠ I,#R[&R]ʊ0bRAyR[怉v=5!d[2@dM-1nݠx{ɵP 7M͠r̀EU|o$ g/4aeuf&!SMmJ M MEZ s[SR4ԓc3\bot K|Cdހ"aĤVQeatM&a0HڮhI|N}+ jHKN27d\.o@^Z Y JŴ*Mh#(=MVJs(!4֠r/J,#6d].ZJ/[D7 XU#.x TjG0ԿM1)|tF -([7\B E]f 1 B?4dJ'bC[(Y o>6$)[R`jZAQFdwҬ( F2x D%7V-V%Qf!v6 yjwWX+? HJr;HJM- A`j o4oLj;?hlIX0ŜW­,$< wĤcB^mjlWUVuJ(!S {<$uCH}.0>}Eoҗ]*{WRzZ}8TJsl˾sQ eZ"8-ގfʝw{%FpGIxe{ kX_m >#2}7vo1@Wڎ6*qA4VT{s>YZ_Z 3VfHnK~_ت:V`}m^i+6D[j[ S)VsqRK+7D\-i!:WKiNZAؔt\AIYͤcl+[PRzcu?ڜ4ǹ~C` ʌ]6'c1!<= Kw `W{_Q20/돨X Ef$V*ΜIS^"(79Oa -cyB[lu0P aL(n^t=E1Ζdڬ00/RQGjuJ/L2*W6/tr.R)u QR_aEnX,>PXpQU'jeO/ND :Z BS_ͻ4䰐oA(` ֤LGȼ,}(cú6)]JsʲL('yXy]#y^ȭ(K'qwYkR|ޕƑ#R2W桱ݗzzaW`Ֆ%Y*3T*f塪4 Tq1"؞{/I2fqyYĻŎLK7Wݓ#mKJ&Ѩ:tN(=%`D@:EFFYk "Vd2E}(m'B'_>i~nۖ:xa)V[8,W,÷g ZD)pØ$䀗IsL#j49FGֺ͓=@ρPE,RquJ0(Վd)-du6vmhb"qj(SI%, K,X -&h14@&I9g&7XZ=2wC-c G$J;m^3CnJZBIOBsCnrzi=9$J8),?ERB܍E|GOT(H}F>Fsə.6fE޲`SA+I~F@+L%0/P$ Fs˻ȥ65}lHJ*8K0K&s %%F$1."L[8J4`.u!I!ZK#s7>| 3W$J&'_~[4w#&¬r:f!(LҵR^Tܧ);)|E a! 5''Kl%U U4\+Ealb^Oɵn.*~o"*e7öHb.!4&g~L 1orRFw"i&}~ۻfwןo}wb?w_n/{/) ^ Ɋ1!%fK]&4o~wyӷUU_ƭ?GzYVgY5ujc(`B 5_!pStޥg?-fb9n f]_v2_>LyqɚY\_wc_/gs g"E JEr1 +24בh䜓6-._rxlVTNVԲbi0s 0׫i m MpSHCZPs!kH|%w7>T従[(R#6J+0 [ۤP K% +TJ" &ZI%\# K0?Z.).F2`Nm򉻛J̊Tp9Ŭl">Qx!d’nkvWkba(8Z^*MiCm(84W?H WwݧV@s_'_7Vh4qiߟtCo0j_͖cy0tVs>VRawDk?d7rI ]eL@u{}؂($S}_;ЂLMv]~(ڨۋ=RԒ|LX+y~*7 C*p4WDE>T*-xxҍWm*hZ^嗛e2ݔ!#M:+Ulk# oWI@ˇ3<߅: %0Z?A>)޸ t49z-V\*jgbxmv蕞xzCPh&;b+6>U]7ɀo己 n5ԞhpmtL'f:'u;ߑ=cOGʑaj~=Iv~d{v&Z :r%;t0LGFg . :dxi'Iq ~7Yg l "Vq"XshiJ<)Na$GM]_ԩIbKߡ($-)z<_5!ubXN{IAJSLkT>V[q4gpoU4ɢf"FJUQYEVp3rʭW^)dP{[ŋB;zT! 
ePONhdK3qɴQJle5R:8+,^4 4})V4{z2JӶ:%{ji/JS kO0I>98PtLcm"3T MZ)8*q/ٟU;5^NP4{FpWn0y9"h^ XHqEQ1J~bH~ \ppU_)?rZK_9?rJvda dG)R.,#2Ѝ=fr,grkIFh [/RġX#Z%Q:bB]\EJ|o16L9ԊP?*ҳHϲ"=ˊHb,6: 3jz&8gs1HS Cp$bEĸcu 堯x[YUR!n xz 4k{_ phRX)U147/HvU0 Y^w󞨌xx"v4>_.NJ "2BR ArFXr"a%A^}Zy~@!sutt׸9\CberIWA+$Y*0ZFVxٕԊoLsZ>ҡҧj1ȥgO>&# =DŽA+..׫6jߞ|LG !مAx -&+uбy[5JJb2 Eb ct!&vKA7Z'(ġ^^6f+dwZk6ݓb7c+zͳ{z]Rk]oN?bF[;T[Q,tmO m8ނ` 㽙 uܴP1*->/ɔ;}B !TtJRE]tk<0%oy| )y.§@8BPˎ#Co)C#bQ{aArq@aƜ$'ZLW&EhȢ؛GVcnLlvfۉءcʑf+dˑbۏi;PRFa  #501}#S~e'h?%S ?n/dN2%R%iI2ߵ>ȉd[OnJevp_:F漂>#'Lu!wQy{^O");t{J:(^[4`PP\lOV?A<&fui{Chx?c v`o|oJMqf0ZM!fO atim7rJvw21X`>b@QU3݋lLKTLt" L0@B헑iJcc!kT&>71rp*VϿ( {} Va}!)0"+ڒTMJd *ܦFH ]R7ڂ5TLI*7QB70[L_钔0W^;6aA8U9 H+H"WXuۋR7pA\"+cY(4겲HV#uU SYQ1~- 0`|Ï+1{eA`ht@Zh9أx hrsPѧtu"D5KTÅnU{96Uz<{,I~3N#rzѺDmvY`]O=6 ¨%a}$"O=U'DZiYjS0\!Sҕ;لqA\␦sVC3%Ce9"FZ_.gy/3q|qHG-'Rl4HbT:zro!"+s NjyhhyHK 8u9hn@/4tM*csg%j;LK&Zl>8CD'4> ^JP.xdt兇OtLK˭-((V+pxPd$7Uu*X7rSl:~/ 1 ^,IBHу=K܏*T榺O &bwozDxqTnry )|[bC?6b؇;v%CϴRP VVtf& yZMr=*.:LRR.r(Rj-sD&#]EAAсֳ"ӟM t6=)rE Q r(  SkUˎf'ʇs%[]s*4 dVbf1W\lSAu!hYkf\ e.a}>s˿ ň\s- cwFdMy Sp#WeLQH0j?j-L|܉IU``v/wuJ<<.U"3oB a_b?sߙ9 % ŸI'M25{ߍP{e̊pz$Nu˚cgSV)Ȩ:5+0U5]%S\qn)Og]kbvRNV04HRƹ;xM.$%*{2m]ގ[6QiNj̨`:FA~u}~Y=*9WF#B4ny)%d0s5ҶTʊC~/c]lh!:mtG]v<ЇFhd1kGgpLQOsNܘ[sNgᜤw?DH/p`nnf6둲K&o57W^v}ijo#{J<̀cER{U\HWV!DzL 72'%P$1"&Huim28*܏dPR>+.vƧg OH,{w,}@) $erݷ̀%/pZ>PtB 拄..1MMM,oCx$q?=?t\$Zg뼃"%0"+M9DTfEF+&1p{ZHh~i/]SBs9s!-XW_Y  ~r}@kf%>'oxH! DcNrp]U!i#)D?^r$#uUt(D! ֚OQc< ZdZ'"~Qk,/+s{nOjwƪݠq$|Fˉ "`%cmߓ,K\JSYiAQCBZfgQ?<=l>#uqMӔʀ. 9W wPvt m|F[:z:!z^s3^zp9 ˈ¹HܸEpL y5o\0[R{kC(ټL#4è`^0 `Q磊t%O!5IjfOxc"\XpghT-%W2M:ŇiPXsN!fΈ~V _wƋ -\N ٤RQ'*Bd-VR5V9+a3+QNWu^ApI!8r-0D`®2Ѻƭz|}>ˊO5`PjM)Ua(a0"`riUTT^55Ow~?& Z8{*oZ)np`+'CD8,ժSr~9WCv5?_˟~P=:T `bcD{ߏǟ> ]Rěm.J%}vC2+~}Ψ?l'JbI9)T]rj%au-n![L)ՔqF5eDe(J?ߎʄZ=6 y }.KuklO%";(IbF'gKj1G/T->71*K wYV`Β{\[\jor/p嫡 [2x!wr(ұ]\xk/_lM*Z?VkTi)[Qc6=j9]>n|7 Y<~xA3b,a ׯ.r 3 Xb"kKMq14@,t#F|>_/KNxA3Q χjc4~u 0χ&6kOl}yi;prnbu[Th /no.2%;3xz0z8E c,n-)ocStNM// ! ri= ~&PȑYY,0G"^Ir)_\98Īu (,SM3W9Nո3/CZ$2 0 \xh4NEܸ)v4.w@%]s=7[nM4}Xt/hF9yiA^&t5#nM]򖆥$;盝J\/8!<{9x<ң})#xQeRv獥7V.$n~CMֳޑhqpiG^[o%WKy9w~{]˗r1^ؒ9 %*xyfrPTD `p*Ŝr@TBHa83utgmZ{`SEX)Z*Z# ^kk5*D/EsDąrV/$'5g~ؔ.k8P, qa.kO M v+J!Klm,s7rRlZuc&Py!UȔMF{H?;L̊w^gk-V;r.S:DldIPِ~4KTِ/e+CUK: R {2Rk1nrH ̇a)nÜV?pVڮnlZ})*IE1\4%6Mu]kMR)[hB*hc̊!{J j];dL "БsHD(Ų dJkҢЍ#oEkw_VOⶵ;E&Ɂ,|SdGCsl@c*9WC7JhJY!_>Q31W(Regk}U[tywr%ĭE #8S?}}5iռ@ZwzWJ`JfE0Pm_CN_"ٱKl_/ t8ѷ]v@)TF4{QU20SdyYW|)hH %dGh.g(1(SDRDj@s: w|#צ ^=kPަoϒ&4`6B =a;E \ C @\Ul $$ja!ؾZ>rpt~8 'EwA-8ޜƂx&ghbYIzRE k'ɣ3/~GC-%z#B0ω 4bJL'cܥqT y8;  @<])5ҕ3U5*aҺ5BtH5tG1Tb,IJABD;˙еrahO B 9.'LP+ZSsM(y`nrj3pKs/ f)[:`Cؓ۷ɞQ͉ܐ[ ۱ٯZC3{8 I S[pL9BPkS ҈  bčIaMEʽ%ƒI45/ mї[Qp?)11|AlѦ#sj3LH/+n]8H^w$H҉]\%\6F~펅OKX.@G_ OI^.rD}^wVߌ4~|j˨r&~d W~h JY2NއGϳZ1 ,OK(2 gvz*s}'c5e6>Bȷ-٠N&-*^:s`-}cHq$i< ϥjSjaAQaQKbLQWf[wQZ>B\d3s!טx-4.$Rg8BlVc}^v<ӇȄ"e_ %djfxH BqkYrUdo:5ªaR{hKkQ|GHʔ,HJ5¶>/Q*.5=?X^1r35VM2![8?zҺN͋0{yvJ{ee*N6Zs,^/0,f<ڃ!=n!]|>}ޯ<>P$גKFY{w\~=| |q|{6Mgޗ:K6Wa(W~4bؙՊ`Pg&VlGZZ5)~yiR![mh:)H]_36g 8j{_npFn,dˣV4[ QiNWR"MFcWYA3 SYא䚆ǥmF([uUC@MKj݌1'* -vizH*tWj!HzoM6ؕ Rt$Bs#Nd;84MTWbBfm,/(W< ɎJbքq厶A|8PSRMQB^Z:۝E4Zb"/uJӚRjMf$wf3iZ*0,& paI ,c^xd"=hAJ!&x^Us(Ӆ1m@fTs$Ths+gsZsru9 #ӒgB*CB1GP˳Uz+>NQ}P"m@[kw$d՝UggiaY^[A>ɚq6́G49[H>WPlC@` -M = eޖ0Tn[0TvP$TvU9<ҤGBo74^cOUfeDo!2;jsI*g$%)-0O'r{gLʭ0#Ma2#^2[iIfro,rƲJW?:([>!%*hqjC5b;e8bR|ʩIw*lJ~S5k {Qs]yLMCx`u}qwIN B $kw I:F˵h=uS]ֺRkZ)I@r{ǪԋuHQi$>C&z K(kB" \+SOW)zʏ9;2!;:$4=k ʝ洪_x?=/<-g֏8I1þ|{=ʕD 2&L 61BY.~t8Ҍߔi, \9ʰB\(i L. 
Ƙ"#\ܼa}gGBqA=:,?&b5>gG{C߳S{zO45c;=4ʆ@m.핯|*ԍȻc,s}H&x1Nldg5Qu>stfr@MN&.]YTR%hQ|exsO /O 0Mf>|xv0ٞ}/[Y0`~IFG>\tyaq,+$T~,O2 ˧ @8txd4->=dꦇIEK$8=8gG%fdOwOgAE|:_AwMN,73;])P{47͌}":1R0< ?j6lhnC놂?B ~ýp!g%.<;$<ޗVٟB ]W7Ed9q^o|W9 /2a?ƜoP׾S/<)Ls>Ϟ~G3z~bt8Z\\{U4>ο:jBx]6?u*CWY3{>tن?ڲ* *>q.vy?￀ }q62|(xɎˣ_p4)J/޹kѱ;W~??OAT?TພaݕȎɣwzt؁~/R^&_EiO=!>:z<;.}8l@&ǿ_ Wxk?_we˲Ɨ^<>;>֮7TJ| %w.)Uo4)XҤhB\&&{p>G{}D2EʴhiE.P =?%=˜TH EQܒ(7.{{FGGp]K%B^9LlmAh1-0kzmeq3^Az̝H<UJx~7s7 DW0z%0ρ+8*jhuEO*A;|J%HU2}A"ӉL'2>N m- w0T8ϕGʩL*2&\Ȕ.owrFw_ASa&g\rf "RBVSH;GCTr6s)eM),b "רhqƌrPi4K9씷9 g8bD9Z@cAs/CL&J%yhIsCVљBu+qYscƑf8oq1(Du4gT*XXXfq1Q`hI+'Ls=iN*lCPggFzqaAJR2 7ZAB5R"Jc#B)%A@!WHy1֞-wYkY$]!28D {i ڀ5K-0N'd jXVHG,"x=58]so"S?i#8)co߾m3_ֱ+A!+F%(&w}$ɷv`=Aձg3mY`.W@JdDnA}m3I3m3׼cmVm B$fd:'="[,; $RKDLj;;qa8..8_"%ג yQW(@~<@cF,pzN?EcAx/za4q|²iAޛ"M߃`odz (j,?E= ڕB:r oP?$jX" WH P`jAzD1je ML94 P̝s@1k׏5M }Dmg+ h?h_FېcMY|H#CGFX0-AJmsbb3VAԮM jT?~~ꇦ 7_?4 +~@clT. Q(;v)H6c J3漑) ~'@\-w(xh,RKÂŗ߲~Q]Y;g EPBj#٥ 8s$g@$mCRNV[3biAGo_?naSa5 f kUQw~/^ 6ה Ra\:֛(@,3$,Ēm; y5Fy+ JF. bHVG]k"f\'TU,wOjƲkj/IJkR f[j{46>ƑjkDu)-PPK0^Q#=B!oV(/Z;#/&Q;Q;S-h[xxLBF0GŎq:n\b0r`:cX`Ga{J/G-u9!(MNErV6m j5>~oskh$={g ٧1z{-cmE%r>zw:uc8^1hsy؍=_cRkUۥ ΈY{ZAr$ZoU9g R4%a*|PAÒjfHMAw%Ih]o܅ߏox<A[+I^ރ5wCt<0¦/7^i퇣nO(SB&@PëqlXN6d8o'AaS/Q+?$URPtmftlTYqot᪟},iO*U_W/ȞLI#Lp}gs/Y3y˗.ӽmj.%UZT]ik"d `@}m~;D3ݎ ߻B5w3fcO*)ԣ b;DixzLͤ*6d_y<[& }mvdUvsQ'>.lS"'[dMkWny:.imzZ_o(+I&[aWX j;YW_XTemi~<7~z}IJe]\Nkx&車jgw|>?_ 3ᄐ<>Vg04DAK| UOںz3v)v[xHjk+ uu ϑ=S:v7G,q3]JF2O7}~Zll2:kp 1cxrH幄+%ܔϥ+f/a/&7S*1 >yVU<tFSujm㎑t+388(mh L |N |/^l|Jȕ֫7zjj´As9E-^-۫%WKn2nH۰jJ ZU.h / ֓p-JxӄjcRMQAa ('-Ha8G8~!IF@K8뙂Hn)=ix98^yYy%3"ό:C4O8 'wCAY?&+uds:hte&Sm+\Mm"Z5hStT\_&Ѐvh4:@&uV@ dNDRɠyL8oAkk$ |*@X"䀜M,T4Ѓ:xI-dw>OP\s_]Y3P0>8MOGNL 9 _pJ ;Y m @jMm0.5 eRR* &xd4 t{6 `w6½9+mMzCGlT&| 4iA1D)3>gJb\Ьu'DGa@m:k}n8 _r9@=V>,%T:[ɇsJ5;;+A9׳IXX,HDIL?t E;R,I<` {iT^Z~r~3Y1{2OxS9YuQy7ÙB;*oD,O>~gZhW^MƷ^Y \\i;Υ z|5zgz޲/J~MmNSۣ78>XFΧ760!Q zb-&a4krMV~2-Z۴(댷٪:~9zʩ1]ɀ.h%ؠ8-.p'j$>|^HjELtO3#}YCQTc D=6`U]M@ѳVTdu%nI.;hR,% *{3{rkjۙDFb\~oVd7dRD'+ɯ5)ED lRD{#NvM S]ۛXKx`RT>!BTaB`M,AJWoâ=0T?65I:v̎B!IioH;#N&i^7Y!xL`v`:K=ʒC*7+=$Q4\E@<jrn͏b(Ə6PwCAJo,(6۴( >J?Cx^$xnyԤAFw<-4jb=p$DU'oL'MIFR$: =28^mJ Y'p1Vf W!xm`A;ـDMi$#.Knt#nZkA4"^ȀQ4q]ަEe[Z YtwA$֨oAibcP>:U1kr~- efQU7ehgVhJBYggjl1Lޣ۴lBq!N`WWdpZ.= $;I~6I@г( XUNz]]YJ EZu{|sf%4_4Ƽ %TFr[-fC̻ٶJoU6Z)+jk&A~Q'xC֊w&F5J qLMJsfѺAXJ7IY'DW5R5ƒn-eX&Yd!p ǔMfdՐ5b=ň79EBvnӢV [ ND'iD=O0 gxxlQ!^7fi@o/FA.ӣcɑ}{>h,^(#9Iҏ9S}Y$'EcPн`ra@8NUĜA[x.lNIb_rJg9Fƿ%i6 bqV-$;PW$Pc/јC.̲THq+u B9'&u8ʦdShSa9m;X"GzFz$8.%[v{@UzCzr _rk `=㜾6f=6/ԭ08\RPQX%mUV.TB ĉȉWi Wחڧtw T )ʅqp F ʏblbXX#f *CEuHgVR+eZg&%Ӕ"]] &tA=r>D$r)P b1\p˩%i"80Pe!/dEs{pܝJi[%!,@+ΩH\dl!cE]X4 'tЇCp|4[ p"!0FO1v3[Şkc3I)Ď**{qj{DU@H(DyD%b&=ŋ\_1ƪ2*Dp(c*,{ıN|_kH :$6 aLA;Je!٢BN>`em4.^'؆Tf9IU (q#ea+>U`΅+8qR` #3+ڞĤS;Z 6T b# S$~D 9"i#0DE$k-adB;PL,9n i{&eZM1YSĂ'ϤWr __>"X\Y &Ze5P 0gL&QĜI %#fB!1ZdlZ8]9gPifMWAT5 }& *j: YæcNN5Y8tHa&)Uu9k|<ܷf-'+gh&t wߌ.Wݣibe vrrI|cťFv9orni9İrwGqQ&įdږBg$ftj%mw:)Wi_:ywHt6C9: 75ؽd j4[wB @cLr-as9~[[l[3۝mt*|햲خ `9h l14]e*Y\t+R0UTl0Xi_Id CDNkl_09\7)D6GNN|xdXBVqjC.&!#aknҡ"Ur5Hr_ݡˡ*Cz9t28߱&=۴B"ǂhӞqtľ}w?:oyM͛i{*>dڥ-|6t֣\c4=uPxCe@la%{Ǘ_7{7l}KD0Ia! 
۷ąHCYww{M_3*|y7-'?,D0_GӴYiA:~Gש"!.~YB#gΞDB~QJab 5.`);+pqq9GBl<.7Y6JU\x~s{[Fd ٷ~:ilK(EqO]WV&SzsXƅrj\V\ϊa|ojQlV6?y/ ΊL͗g^$?xr6!"jCEMN)o8;rA[&tJY"rB]qaXc8IG>+&^T5rjy>iEq>M8ɤՏ?[O}iZqi3QVAO}"۸v|n?o[A;C3dyDc !Hd v~}/ `ue|B\an>Oq6%\xpߊ_<7_jBnq?y;jૻӚ,NL<j,_gtƎ2wҶ~ws}qo닐zcȒg6~>Ҡ!Vhߝt4yf{7= ITGg0{`-2ʝHQ#)*yv6AEpj݂ElEۡڗ{ۊ%t^pUh+^lL<*f/?/..UXG( Tz3m9~ے0ڿG] ptx {:x_~۳YTpM^yUzF 8LIr(dQ"u][:w jjQg&XסsۭsJr]=)zslmccLF֝qQXCx#tmZTĸzwFef򳸹5Dٖ)]^:^X%W< ^H&Wpf܉^Jg)Y'B\qљ7nჇwf8 k􇬒ž&?Ȃ{~لgЙjԤӒ?[0^tG/M{9y=_ڬvaʧ+%ª8kW e@Q^_GO0_ { /|wwg*UwK3}7e:֑1}3 h?w;ʌ2/ZwkoニE*_ ^k-~p6?da1]*i qPx9M/A٥y}$kVvHp v[(܃_oŠz$2O.4|'WwAQL-<䜌jT֫LH×"ܦJ8u'c߇C!oB>%\B{KЉѣ B#^]__9PӲz=n,3"E"wQ&o3Gr-yị1YL2;ie~1gг/->zvulCBu}/U{kr!$yCAl}2<\jW(8Z}!ʌ}(w'4z˓l'Dfq>`RǓñi'dy{æͮ}hSXAZs>pF+x*d:e`͔ 66jOK7ڴhUDU%;+N,krXLi\HDbҍNFOCQRUx,੠қF\ XAgU AJ#x-zga]R6KfT"+(#*S x(ckb!A:MQޛl(55^KVSjٶR 6pLЭ=B)АA)ٻƍ$r 00%&Af9nؒ!ɞ _5%K%YM!j`Yꪮ*k 4%&%"K >V =i3S[ tR3!%\NIMAyi Zg|M'?\1^Sr0㶖LZ-nϼΩ"A"yξ"[#nl-d3 CAea(C\TU 0n*|j ؑ V S!Q:U&qDc0-O-V(`b `,/  HjB;VDף }t8)x;h6I&ֈh>Z ^T[`oWR2$(q6Kar¥T )mD5AT34XxLߢ}jgI.} /Ǒ1L<''U{ﮮ_0^G%R_/]gmf֜|X&2B9E.7pbAh<{c8M4aV)do WF˳(PRdvU)m6,p扼pU婅n%n6cto_t~qay<8MVB*kC+daMVBZXQ[%B[GeHk2 /*vhy߀!ܸik 3*h;Y- 7RL LI,Hrѱ)kQ.&Tj5Ra Q&-Hf28G's X5n6[$,4]<Sw0W6LiD|EV$*Q:f$q"04AP|Wcε&$T~$5G`2`f#Ki Ƽfu@ht!OxjcXCx8}N(Ӕ([^[2׻jAUN/Ҙ,51gyJ苅ߖIA(v3iGnB KyChI d/f5ڹ`@T"F6/O٧pSPۄqJ`'0L$sH4WS ațZ2 x)%?O1pýuTđ} 9ĉ2|Se5jSYNMY/1F۹Z!OwĺS2ɋ1/츟/y^yz _nf"*nR3z$>M3W7Q:k;X41%HҖT$D"í/Z7 4ˋ?u0OM&(ղHP-F_qu䬗3 }w-Ų4 ?1 +NfdDIBI h^(oyχoPlxCƩ\M ߿f,[rXnY,d8+<3\:ŵXfJ+Dh´~>roП7C#_zNTE7Ѣè;[1ړ}{K=s\44-:@a-AÉƈi&W~fBɂ?JWerkPK5'-fS9g`SLɐmfZ t2bZGjQGn篒9wcA/=47LK?L] QOH8]h )e w% nc[uff9fAaS`5~3' bbN Iubc`,#4V4qe60{Nf~<1t ٩JwY ?Xy"pSLO[oܽ3@>s 'SԻ_8c"QqscB(" ҋ?X0>keD hCE`<meRVG72$n^e_~|F\7=p!ZtYD?}|2n8y}!l[aRjM0Έ_$_"7Ѧ9]-_~҇1 LjʙouBβaSM8B}&Z>=lBE}~$OI:|ξ +§AIb\ĢHwk;"R lS|7MZNɸ! :(ɬ%NaH&|Ma5bT#a&9}]/ugw^5)Z7}tqex2Ŗ{MdāLFE>t.-`3%YF$z-;V!#N57)y^^fe=L"Y:y{,+Q i X8݇*P tX.<1HaQzkTxw̑j1;8"7h#2O$rT.ďaP>EaГ*d#F1f;NIZv=~m;fR6;/֢P+A)?p͉-m}A,dKo?'Gt)20N\gǕyh/@$ֵmXbJh)~m\^v^}yi1[E!,/hG %[K9laרeL‚NTs)Du9'ބ%755*cyk c孤$?/DG)>#]eGp&-8eC8>q<}jsQψvKH"'t(j0'hʜ~7 UN!ߓLWR$sg9+Pڥ |L`D d{>%XR6}bA $^DKbIT[ɗ,cDb*"qUخ8Uٌ=tQE;ěFZ "4ݎb_} z1Z!zpB$ Fs&?V &p4/R?Rο<}7TowNXfkg 8V!-$=*CO!&Ǔ0Jq@1S!YC5Z z7*V'e`>ZRO%9!6zDIF lvHdT&e/$sϸUXpx-(tR#  $|;aO3;n hlu7ꔠ&5{> y- q*O9]cg\>xΦTxѪ#lj)-.i!57;,<>_ j1">tAz#˼bG?ۅG MHtY'NHX5WC(@x]șh ԦS7yqxAX5R0Q +EjA *5PxoWRd@=\I%=}q=^B+qg{9CroH@!1ZCxLm??j>D~|Y\|rq> %D4l#a7՚~WF:&d-.baݢN(/mwM~+(|EY.~}bo@鉺|:tɩj그qZu帓_:gZoqn>f٦Em5a,d;v2b9p˅-v-4t(9pQ ^ eárNQ%Ory\>ѽѹ|u#+ &B| a7tfM9DؒMqv',#N1*haeƐ5Q%AfF4m/z,TptyN/9}Ns|ANboq< W\:E믑)ngis(b7X/!Gc#_w2t'y//rG=I ;5r5sGJ;#c%L$S19jT쭮3څC7~ZMiu~آ6PvhLje^BXʱ VAXkp-EՠtJ շӠG#պw+xqmIY-歭K]c]hu>8H.w*Ӣ#r0X f !Trs%p|NdHBt1v 1!H:(I,RYQ$%;)95mT!;jrFaO7WQ+}f3AfwWA2Å"Y'WVX$FD ur8TQi_c'NLPch-n3pndW^&S/ux7]cp&ǾG^pBnn_Fh<[٭--DSl8IO-QXic8;ѥ3H][o9+ ,"^cM$ۑ$b[ֵۭe%EU_ɢrN{K0Z$op%Ba`I"*eb2mfpf}P r0xo !4%0Yn=?[@f -E'$*#ᷟ7Ϗ*\MF#?m8FiǗ'G`A0st.ףj~JGqS^MW9~>ߙue9Jf]j&zԟ肏E4C 3,Ur;ޜ=ݍ?Zi$ȤU2=wz22}rsmmABe"eM"enʜ9Ie!:J˳u6iֈ߳'.o89Z_N WGmo& f&xsQg*GYԌ9i1镱s9Lj4mr&#!dnF\hk rmAѼ?^D,JKR,ƨz5B;H` d4@p Zux>sx7J2R¡mGǽeC 3"ד@^6rYtm8= C<9pAV#(.,gej8 :8ƺGAaG) d}|[2W|V+VW?|{|-y\UOooBۛ r7Ood@{9Q}70>.W@X /*"L$ pHTje5oҌKi5%ڠ &FiԈQ,W7Ƹ*s}w-hTIԢΑx1sW>PJxgXpU=VGĜwXɹ\;B E,& 8ky8&K2%S wXDl)3H kZ2fpF5I-BH^;2y4$5˼;iTBB,QdM !vd]wV,fUOyU҈``3÷GJ5՝dmX;%׀r^BooM..y+d?&!ח!2>+99L3ZW_^*Am6e{9/ftvrTݶẲixfs&MGٳWWOcttOB(QO. V2IdКn]brS?b _9 u ;-R 9"iT,Ǡr";Ũ:Y_)A/M.ƈ2rtWQ|uhD mpE9=? 
p5 iVTtonuy QŏpFi *U0 $gDR_YLdk5$nPM\ݳӈ}Zvf Z3;U֮( M8v >C΅QhrM5PšTcD̹p9#UbʾF?*,Gx/8c *jYȼ)c=WHV'7 CᎽkP]:%ŕ0d5W6z,Od@+"CD-)R 9/V_vrL.tuPLլcbZ>UwKdd,0SS`3ө'ڝh@.TGk9`Ι&x[vixaEЏwJ@NoÐkfoya(¼[, \G<:U$\ս> `=8:uD(FKX%1OЗ mi[VTej "ez*26u:PhB<|9% }E'{vyUv90I{L34oxa$=?u@U X{x0<wEwҀ=fO+θ/= l;\Yzm<>]1%= Ig~s6f?P2@*r=z3tpV6E$8lRE@)Q8BRGC@gh䀹$-xn0c= >hQ\# I #>Y#h!(BX->*?֚sXM~>E͐iy?\Y%'C{TH8W痕cDsv|Q߯FkЦ<5!,i5 0ٰϪkn~st7[jC!KwM{DɌ簼bÿ g?MCט^4:ZSxM!OI U9Yz}ǀsqqU*! )h0e`аi ZAB.\gfQ}9Ji8'OB!ZDSf$7E!rTϮ̀gP3 B0AR6?.)AI2~aEVN]>5G9Ϳ29&YʝfWo 㜄hj/*~ růA5Պ_|\Jl9;N @&m I9ʼnKwy<JϣJĽmqC9z/]czy>{ӳluVa875(r@)SDŽ.9QF[V)Y1IgV%2b`y"=A%- hC )ܼ8e`e0P,H HJJRY 2MmmҊq@pfc r [[Me?tiu8!1Z0v]P řՑ#<-"X Qhij$ऋ!sوv"Kym¼̣rbgzygzՅ*@+/$Qh'Jf-!wqI. m;j=vbr1{ϵ:5艔2آd" =_~M#2+7IG5yP#>2&8jsw#fQ 9c!0H%Go3ފeW6t7&jCSmBѕtrI+LW }Ve#Z3TZ5*kl,& n^`7!R0c57cZI=z=ʋS)2?[Q1Ci 02%b"2N}d"Ct^0Ƽ`z3Iy(E9`YVDG?;܀@θbr/ <7{k r{to^?rJQ}xn)BW\is);}xHfA^Λ33'|=v9\˞Kucj:Ww1}c~bhj%TFjliOBk8XKф. [^^ҕ *G]F](Z#i9$8٥m5!IrؠkPR+ FD6m˺ ʒ`Pyf1f],Hblk!:@?G ƺ`Fnk@ ]68Nx$4?^()Gk9v,xsӀ_s9e!>gNy7ƜzՀ辔jqH~YS^l5jނE33-xl-,xwc cN~꬙sjaʕv' 2lB[^8 P`x++GS ID]O/Gʄ,`$m2m`w,ixt"9م5 GSvy_ uSs%rU<\?"D AE{}t6^zsWgюg>:w2=~z"t}IE*E9idHs ]0XM4wJH*@h^ 2!k[ޥ _Q~N~i3.+`V?֖~ʌ#q7RcQ {]6sNڠJڠ#96Ϸ:g{k%Sa0 k#. :GM Jo /#0V(th I䚫9= j<ΪTG~?: Wu/n[{ƍr~W{W)~ PBRKC1ȑS[thCosQ&5z+%5X'w3oW4׶atw;5Ȓ]wgPٱW7W'a|`^CNU'b֦Xx~xu=? 0=>o_طy.&ʇW/"ӶEgI>rv8Drt}f*g,{hƷвD%E%xNe3cа; c-ם k5 4gBZ9}bZ|9IK:W~o/|iu?G $tj2hy*'vu?VEpU^,kkkRRIs+2Oip礳g* 10)/ f F f,RaPL+ 54A*FΗ7<6cyoe\9Zk",N; $h tQ ћ!֜ls4)ŸB ,\bH a-k[bLJn 1f}܅ M)͡ZETa`+4<% n ǖ< FփZi̊w y):q 5 :4zQ7ό+^ff*OHH`P@(=l`쑉oaAZ(lfIPMKِ%`,;WAqP`ω\yOm”5M5!zz dRفC U\o,"LqαR wfBO#IJJ͕a4W%uTA!s$}M匄_nՕaQ0+] 4 ˜OwX$G>.[w.d4.QQ} ܽx\ tLGW=ز|Xr|E8f0|\;NUÁMü_]ܙxd/VC_D2)b5v[J㹘Ƕlvb|r~Dl7%tfݪ0# t2?얛cJ9Á:ԫrޟn?zUt1&;5+6VK]:逗#IpOBnJM{JM{ZJ|t$ӑNk ĸ@'aRtgD/ 71cl&Q~0<rdg0uZU "a~uy>0F-}\*la Gs9!I0=4"EKzj,y.м;k-/*G)hϐn6~mdZ+q緑D(xꬣE̶~Ҹ"ØZ+teDԸ"TtuYu$\G4Y9kg"uTYiW"Ljo2NQ%o-T!B !}D&$!}*$ڧ3BB !}"$ ]ceh*D&},eη8Dԥ%Z(Y֝*q *Yw,ew(8D P%l~2h;"|ta<wvG,( cr@2ܓ^%P88] kNZ7Na_#Ht"8BQEE-EEZӘԸ*:|B\1I,[w.dtT ܼmbEp)jo*B!Q7 /7Z ER3map6w1IZM1V#̼9@,6(Ps%$.kCKaU!LNQO1ru&is3 .ͧUpU.9W~ cėR#$EBK )EQ /sީ F q=N\r6T< xW/TFaD3wŠ5sZj`;-ц" C8M9OYj7iS6wY ګ A'Xro )XKڄgn(W3ѓ1 drS3Pb>\][[{5 ~^bqε I)7 ސ7 θ\Ѝ&rUT~^݀"2p/ ~٠MgGח{hRo^#Mk![?Pδ0XIE$dk+2t+ة/&KGQv6Ƿɝ)i}Ӝ+zV{N=tby9?G n+HV( 3TKK?s)$Etov1eGk[ؾ܎Wegߚ.)SPa™lng<_H."k0=oӸLÍ#3˥gtamV؃wV)< Ԁſ")NG hoc]54IO<@hi9v }XۃZb<"mW{Kwa͊; ;ɿݝהޠ:JG_v &6*-Y Q9Jq̄jKGq\Ċj 6qfF%(kbXx`AK]*Tc%IIL`R,Q8ɱDx?pd87_qT=o oWx7*M+s sp26YaO'qp[։+Ǎ,@x{p<[^GQ{03.9^Oz(.tXmq12uRFgW heT|)O8z=PΜU[-剥iRO,'b塈i:lz%)IE_:iհۛ˺a2T[jS`&Dv4MA}lcdsXխHUc0Zk?Nfa\hpوrXt @J a܂E$W1%3grBhQR!KGg~?lQ10~XU\<.7f5ߐh( %|, '|bhADddCg0'g7g)f{]34?Cwnp?d+H-ݻo3r ` wiVmK8{Ԅ#v )Xl8;ӏ0AJN>ʿ 'm\x>Y/i;LVMuhQ;(Ŵ|{oo"11e|Ƹ 0=NxYrrLKkS=n 1զh QFQ[7ΜHvӈٚND7Fn\pIdX%lc_V9V`>ڸCAXQ:q]qk.X>oZH#@85~ R]zw5v "QUsĊ\Qp8`lHnIיdIKyQRzF]\bGŎә(T/X~bbᥢ#$3tC6Қ*~Sʥg5o;]W)bKΚ- )r$G&I\ f`F0On##R6>Ƿ ~,ͥ9N.^¨Xc?G 4zѿ] {Sw{ŲEH\""`2L2-%R9eUTکYh^kt 5(zrk73A8_wONhXMNz1Q1[NeLѦ#naTw=Ew &CŖCڎbm^e}G [}-1=yFXAPo3hpn//HӪ#[Գm6BQ@=:!.H-h4hI=W[;hG 4cwQ)V%=>v)ՃP?%+}D'ߩt>w M)5tyԓ9#XA[_|w7 Kul撱OVEKAyHse HֿhUKf&f P>G{CA]=EuIQ]b>|J$>2$?3 7SU,Urj1DGC—tτɻ!JF;_!uJ*ZߣWn%U`-jw*{g ɺ044KF*)q+x yq1瘲1Djf6ϝp*IK=82d QK,<3(q%ϬB*CBȸ /@/^:ÇwG]}L n=XGsX$x\+1EKR~xΑlrE3mrJs),e`t{F|Ӹ(75^1ުM:_4<[~N(_I Ee\ڙ mH:v.^k3~Y3:L;3~sH֥V|q'*h۰f@;!ˉl ޭ#(h_ +gj%}|A[W 1{zpEm2䒵A3ڱ8` i?诽l?3~si?nA=k7j ##_{ke8_ F*8{oo'3'TRMnI%{;l&4OxZZ>R[-gEsDp.*:oGѭuߛ0p ~K"7@ak@hDBmAQ:{r[E1{H!B橉ຒOwz161J Dqzz5S ӝfGPq'5Vw_"*q{4$3IW'c6)d/V1_,O[J^$+E"sAM|]& MSO.MDփFC56lK*8`B5ȫ0}x?&6ZtR@ >lJ0noh~>ٗV8_M< S7B$}2Ks( cd,}yƩ6'ԁSޖnZ'mt{sK{6T⾽Hm_ۢJʼn}BU"5?B 
IS^4@Ru4}h&U@V\zs.Kmwf"1/JXj=2mb}Q{aʹDUҺsvOLk[/l,刐9wG4fmo"ƕPu?ɴ{[cu [iN!MO뉕RO Iz|k_ JƬV1hagTkܭ9VӬn&x uVx[6b[{}WbR\ΎWV  iO닞*H?`\t ouŌЯ}{|fuLo/?ߟ~=g ?tn0|<#\ԞX{fֆp[e[ԠЂ9Hc{,fH9"00Δ䨢]H0.I8"LdEt51#uп9/7-R^x⟢0m[EG'CP3[A՟d [}[b Wyڀ5J"xn` sisHn9>W *jV:˃ƽp]0rPb>V!6t{?+?c78y0&}/ZsfFge >UbFnNt7FUU.tbKPLyDLcݷD)A uZ$>Pau_kiO Ș2)#( &Ncba7802BheU84Nl|wfH2ԸoLp(\58*IP z9]oz[#Ϳ"峿#~Է |L |Z% 8 C-s P)prgHt0ajɰ($bq"nIVX>Y:^pBpwVT끄1\qS\bG >J@HF&*kKsKDCr҈$7Zq,R, 0kɘb 28De%#1/Yh-XƧ VJ2k07,R-VC72PjSb p,l%( "2, 0pעb̮/yw֦5aT\MWX?IJ ⤡򅈌>Y{ww/F CkXg.`߬oZqg)n< BMy,MO>4 F78./q_.KbiC6v~ h0 %CJwr'I}вsR7@H\gY9$2JاT: Qr'*f= ]*H7 ͅ 7H>|~iBNZ\"'op9%!f|"5F:8Qf^nm76{U8@da_nyW.Wt8bJ6~&l} c_esהt zDሤlziGQllczxUɃKZUr3 '(!GrW5ܚYbY)TnRL68gn'6a)(cDtlޟΘl??+}@;%1I gұAM S3 빱hbB; x(C-Ş%ǘ0Y/{Z$A&,AM3z$SdO[#YfwKh0M~_XE^3Kע82EU/ˇ0r\EC{`ut2j MNd(#Z.H\$+*kmLG틪OK둎yČ|tƂf {s}}ew@KhBnx3O~TۈɺދU>4683L>nDh㈦ 90DP5O>J[EIv!+Đ\ P ƝP&Fr4rJ&P7LGad EmSc%&'dHcƱ`t,IQDƦwGJZ _9k+λ} Y'dYء+Gλ$=kiSZ5C-3u:Ԝ=V4,/ᷫ6Oi0R) ށdkdR/9D0Gb6٨].To\{-/~r!ɚI>9֛tuAN/'y,#ξA\tˉcٍ@*b4W{\NQS8GN!R=8t45_lTtrNrJOaMї7 Jva:͐]9͐ۃ+{}0V‡= mUa:wڜÕ&jw:\#ZÐ{ (@Q?B)<RI6֠D"kUsVU}NuAS-UEbQ{fY;{r:|r~u%C af3]tQ.1g-%eUE2"dB={yy>p ks:OW]̊+iI(+ʒQ"4[qOxY%HֈH2TY_58]De=dEUFԣ'N69=Py^!C"6!KԴsH*ɟ=SΥq&7kjd blnUȓ:'tZsRe];3XD#nvpdzD}.q!ɸ\1]nLؗX f#up64#53Ri؍}p؟|%O] #G:tyѦXfpv8reD77X5yJY}!:>+y³ջ+oP>>5'jE?ngTnyGRIvb_UP>dG}߳8{~v__Ś\_Śv}f!+K?ya,~aU[ZP OB\pP%{OF-(O|2g׀)gfmy0le-_L+hkU\ՠs^yЬTFh-.{Dx8ZvS(i@ {(5.%p$ B/m0&*F#w_]%ZNR<ױ-$o~MT^IR-$ mOHR>sĄF:f6gEԐ$HΓk"FALLdXQD ^ \E F *F/F$ jU5T+IҫWkT.AƳF8i%H$DFR.!.Kv2ʐI9U@m9> l%Mby%⬳22NH8z%nc)lTCX%;Sh ШǓr,ZZ(%h)` IJ ;a0}&W9nr&W9nUwQ\T`cq* Z%$.`OWi@1>)Qvt.;%ht^Z+Qԓ=]0ʴS3A6>4F)nAҵt@/(Hr}q6ij5' ɵN!E#1sCtLe7GϿ-ߘG,ZIF(͝j]1SK @ܢ҈E_ ~-ooE8݇j#)kUY _h<ݵhP 6ftc9O *Eǃ W8 $Xt Ee=PczR=*fn&o`"چukKP Ae-~AxFMCpJ2h"*uB(\_-BdFCJ;~uGs;\pR.@dvAP" ņ@# L Nz^( WkU')Pc:fIo[ ʝm ;m63B/ѡB9!}4 o%CGYpPŕi4B 2hI3N2zlQTE'O?#_$JEJOZ$Fh,pTv N@CGN'hy޾mz0:PT~i$dﱠV}.[T:4!H9H LdPM,Y4b{ T28Vuܼ?hʋ1]Z}"B$f?~TG+.R(Ærꈚ⣊D*g?MnHV1Ĥzt sC]#P~ͷH Ed1K RBVK? E0U ͌TbpL3Y!ZCH)y.) yHZ;(qմa #ڴJD.գ៉ц%ze{k&ԵۦţKZCD"9͊"H/]p0 -19Pm$^9ʲX;huǎi5lTPySy\ՎA~OM!r"h9EzID6]QGW1Gys|̎lPWbpjv̧``%ZD&K2uTs;2 +D ) $ȶ}"ӓ}0TS5KtN]F)k]] w] *$]i ÎbTɌz5fJN=-OWV_̪ BK7ݳhh(-7Rl6[qy3Z5|WnveFzgeMМ}޼~9! ߽7W&%M¹0_vuޅ =8}źx g-<^Ŗ5b'tR),!S!졭5G/rZwƎV 6'.SQf{VxC\  C^rbly=%hA^ּr=3<=@KU[OPU胔߂m-j^M!;Ɩ <ؒk Ov[@5S}@ہ>@kCq1MW\wl z679.<@SO *Ƕ*K{!m+LCA0\_ͮC_"=N տa=D ^^&!(/#FD֥Kvt/)*pYB*GNtvqeV5F\Oq09UOv= mh:9R2]I8eeWWt5.#Yvc8d{;GyECX*w_;SXVryEaQユPV>Ң viIæ_o=i޷ً]z=إY8޸OK+Sا囦sG֓sp:|>]D܍=0t:F3l=߰{a)9TVۛ1=ZHNI 'J:o.k^sތ@k3]d|sŒ͖wC^eͫ |,xy" ri>'<E+\L&I \4ɖw:"G/EdlQz-^{lK~iphu施{!-8Ajn;Ϛ YSg %\OGa3E),j|*Yhzɫ!zĨ` ӕQal$Ǩ@J !ʸ=BM31iQbpp&M2*8R'BPxiJF`qeoBQ!dp2iq1{I:Fyw:Q!Qr)4it=' ܍={dsf:F3l=nQɧLeɏ*7wczF KҤa-I=ƤcÈI: aRb+mH}C>v@v}Ilgռ4 pLT]ŏR3*Zp}7ʗOq\^R||} B#WJtq__r4[l (nBH2N:ѤM{q` ;oO/}T9LG?n'L&^ue:m00m ;{1Ű^6\ManQ JX=ъz眗-4"5)IiG3 U}pa8< Ha Sf~4@jjeW{qeW{ٕ(j#7Bzq#7Bzٍ B?W4!2ۄ7?t;ą.D8w *3mˊǟ*/+y&woV>2޼TL?7~ݛ]6cgkq3efnVFm.Q<{o&}(יv+Рi2Ѓ3/VFQF^ټyCW3o0d@tA\']_F'U:6j5Ul}fëD`U:P6)(*E2l?4MDuRT쾱֌"Q&]:\]x1eARn$B+y=o6ǨVSogKF\9M5+]DWJ&]ZgW3$b:P6)bVn"iglw,CIZY(g9,=4>@dc;y&P&`.@fXmJ,fqP,לp0gͺ?S).( ؤ|(Ł$<(́ТfΡbF4zz9/2c0Bu^DV{qܭvHvx _|{vE_}{SIxOOTAvY<ͥOX)7CTĺ7^M6Qٽ{30.c맳œ:Cg աl0#@f%+Ȍ]0]<'w?q%T\A,@x](U2J;߮+ ]ྛ/+~s?}zȪkFM %,Jb*ў{猠<"*Ka߇X2m`NMwW$VgcfNDB 0_Ϸݫ.G n &,U4&4Rf醧>|X/od%E4EH0"ȀRo1GYl5RA^0|*eo>֞`CNYN `x,K,? bDUAS`7@ 枷~4JIZٚx? R aܔ@Y&%RP $€&ΚX`w<*)3nPø[o6@V3ha\F]>0k. rv@Ȅ &M9^nf]HIEW|M'ْw؜rDil_1AP˱az 11 *S0Kx? 
8w2"p) BKΔΗ^X+t^*9f`@HMP!z.DD1S\пt>b 9 78PgTfGbыyD/l5qK "NhHO7Ԃ *BĬ'&Fd_q<\߮:7 xͳW|Ǚ8'a04a5<4[ܭ3 ~:}_/eSE0 mͳ(?8OKs1Wyu 1ΫAi2`t Y+( Zυ7VI`AyͽҚJ4Dkf5.ۦ~ϺHV?896"spB^B~pyjHCu!Nh5B;Ebmd!T ѵR`` QY!!LTpO0%㩱6&l^jp<ѠF*FN.{e+ϭ68;0c+l)l0I#*!uaL0$Tc [?(\~ X4ק"cP4m#Fm(1x{~`t6grxk[dY*Vj'UmjAzԔLd;*vu0ēGꙃ!85Ye'G[g0G׈\Q7L##r{< \6.Rh>nD-@6۾n)xu/F]-%3 BJ^kV StR~!m|51$f rt/@&=\xz]2J[^iT2}QRyt!9s")OgyY©}8X} Ўx\Oe"VXd U|`V& BF E!c!^3E[As03xǰTqJeS94(3tOE:/TlHeh Cբ5*kM"z!O ^. &/ vzFNp;=+˪0cReݐR nH)>xeڔpGS"x,l#!pp0 v׉#;{nG\z'fqf({j5-JpWy]Ej1/)-0CN`v TAdjh4ͽELjN(/8XIXI>:3@M vׯ4_S]1C4^^pӚClzhÛw?Ѕ;[p1䐢hz1HѡўD$䉻PJd"*qPD>|rB9䉒c^d<[lVy8V2oV_DSH->* Vz !Y_|uRȕ;#'IOz"R7!h:NKUL4`%$捲dFKoUR5*tvY|9JL?'k{&]י\&RߐL9i3VU#->9L7S[ 1;8yJ, rW^7~ Lqwm i8i)C1q!.˄`S!b&^bdKͰQl1hץNI͠v66^}mFF ^KUOL5k Ĭc]oCDF{,KSiPeSTK Ѳ'KY ^#Aǩ_4$S[s'b@ f{FqԦٳ~|ĬYlbjj+뷞$?Qt8o }!j|㴉"`rjra{7Ɇ?ypKݔ3W`Mi9+Ţ!69^)v&upNv\tފhSF霪@떜9/+8"s@>5kîL|!>D~nUaxMF ; zGKQ;aH`\34~?Y[HyinliK.bi$ꞥ] nJ#%u~Y.ܘ&bm($ƯYg"fc_&k&*l(k.4?__U BIXL8\έƊoIw,Poɖo(l՞#DF!qlW(!e>}Jȷ{e[X}%dRЇZ7dB$r3WEHFnuXʹwߧ뤒q| H)o:$BK GjJ,&уԟ;/|d9_l$R-x# fEx,b:h@$,hXFdX !Wiȓ7ѳ 8z02Kp\ܞW3t3 ߯d=K۔%uK XzbX,q><*@l6KCprm9lcxĨ-K .decZm}e;r]TLu1?]#vHdCD'"qFphc)m >A4Z62L(mf:p\+^2|Q^bi (!VBvS T)$rETh*NzGLKxRXRW]fF٥zR[il,s;.Gb+XתA_.6:Yo%*׽FBk;7CR oSoǥ(ѢXa)y0.J-f;yڿx wEa)YE6j{q։h=޿ؐZp3㜴^Qo{#DB5aDZc$X=j Ǹ*'qe5 0-1f\<+?8 뇹"M+YCc FDyfz1s>8&SC#Dmy ]ٞ,Z?{K@ i fx9ǧONyo^y KO/Q c tI*i۽-x+pl ے!BAK} Iyxqh&ƮAo .1Ypd3'ksl"5Nt_96QQэ۲G.S Z Vſu̮%0^.\ wwѨ %`R.̫:*]WM&l(j-Hg&ĕ]5)`62`\>z2կB^}9P=X^:ڟps\FYGhElgSa)8݂9%u7ηuٔ} vFqTZ@L: ?ZN-S֔O} yO#x'S(ނ˶5|r׌~lr׵!&{XW4r H#!c}Zkvokז}F^|ܵnv\)!%|+Ŗ Tf}ʝ(xC! g8-Fo̔]sZU1FFd0Ó+w5:0ayT֞CNIOeq̞zdu 'I0Y<]S1*J4YY1@igE3N3D,/ #]*VA'CVƣ塣GI=Z[e_{آ UUza6a79Pj4Ej"򢽖s_$a"ZkǽzssLs"8ISQe~~4 .DOՍ0A&s#<%ZRjnLJlňE7m!mLo=ޔË*:n}M),E/qJMc%ܨ&ral;bf8#b{A9~抋8{~??|ͼ=n?NֳU\OfoP,ߠ<|;S~ ̭ `Jo|7{ޛt`It(/V )$e)wLoyNx4+wokp16tViWqyɫ|xe)'χB4Rm 5LlMJ0Z0Y|M՜W-Eb /uG6HVUT>go)91,>se;(4u|7gD 㼯}x2=("$,xP*BP c.L0ₖ/<0?ϫ5bXjcy_bdkٸPBB$1ދMq!RBU96x[h#]p"qbuNBi;-" ao=଒f䋋Fd\."FS d*/DS) Fˈ@$8)e"& il4ֻLZ}jf#q R)χb1m5\}µ!O5d7~z6oͬ{jtRDgnx{ ˥Z=FJ\'VCv4Xy/˳ز5My\E0_Y/"*k""5rNq8;Ac|XL .wte 1):\jPuReItk C[Tm CT{<׻._NA({6k)9d['垜JJo{j3v^ѹ)#٥]YscǻO]9KWQ)j1:&\cכSB:86C!;ҋÐ@SK|y_ 8GYOL C` ^C"Q<8g -eR+ȟw%: gWhzm͚t0{L1h0W1gXr>C_#pN|Mj{V'R'$2i9 PMtH;<3jFgBdrPbVlh:P#h:0C oε\ivǬޗW+ɫr}׃/Kw|?j0(F"1AaIF_oޖG _6O8W :2` U͢gArFA]n֥\r]*uK95`C{=E-PN \`tB#pid)L4R)@gAFAAGMFblЏB"FS#X];t/AfzTeR:"3kq h#^"ȉ2y/.1tS6gwc*oWPlMW ɥh5*8CKLjI$xMYM$F P'4ܝVx2*p"reV;-|LGu- JPe\ PpBu *צZlM騻8ESZNiQuJsk5AU"rNjU_s!=$DT@(WpDӂjj"(rSAq)LC@wNQCyG^,P <#Zu6 tAFf-)4zH9tYk*Éd4)e*ԾR|눤MB&wE#G4^i-~yTGawX{x=jtY= ub 4,ąG .x˂S)5N%eӁEMB]4yݙY dBB uJj\(I93+*1F˥h̘iDp&8d↱d=m` )[po1H2t, Zt-@L]]qQa0!)_ן6$$#6׍F7MFZXF$-,1Ad0E)0u$Z 186ŋ<;xM[P%G |GA8 $qМo7ϯ f#^O]/{ ',O8͗tw >_ܙƓ<,Ai2<`SXͷ7FB$||j,~ǒJF*z!5q SV/d)1Q1*8ޒ"RM=?5)OXCᲥ MX(nL-k_QB)6 Z;VB 5"[0̦Jgm<5|3ޒX#b$U-ܖV;%T0s2G`os(-X 5J%0Fe)oqJʨݖhc{) 3L(Έʕj-#O`bN  3)ᙥ)\f,9NTYK̺ TmfԦ"0^(mbw%DPآjW`8gq}1BzL G]2g˜*0&KKpw^XzPI5w,y׀WgKr9!pB=(~`KJZ0f,>u: (en5,INj4rMYb1 a3XxNcTXxN D0ƥ+D>W]v +Fe'ƅ㩫L-OM.?XJ]M`ז= ;Ds]ZbTs7!7nrPW(<r7} /a Y1U[`juo1nq8>+bk9x{AiڃPGXkM=TTrb{KTvfv(?"HE/eWm6*^'A(Dla:ř雇 hY@MSzx`lwW&]:1| mƔri?_(%8z[%GrK 肌8ʸTidy|1&'Z71pAf k]u d7 cޤcM:bgq^I AEYE>o)ʒd`z*-|EF+ X*~"M?>nчd0yHԇ?,+ţh<(*-#C#giL:ghhxEW De⑅f7U4 /VTFEWJRD!4ET$s0I|U6+uQ8OQDZ.UIJI\^3HB^] 7WL.f6Y*q)uhvO%&AW890¢/T}+jw+L#PRM8v,b Q~0 nE=3A}#/w葢&,vcCV+M"Geߩub. Y"qƍZe1 ZԀ`REe2a 1*O苯V#xFdļi{K0R M$YFRTR/>BӤd Cq̟߾{kwdH{wQ@఺zf^e<Φ*K8 Yg Sd@2)FfvE *z}/.^,«^ )& 3_/}1y^R5racyBa 8)RBRT PQDN,BWQS%B%NE{ʳ|bǾO&so׊r|߰[T >. 
5ꐮ[WͪnbnPt$r\RS{0T0IP֦i$Xk@ hlH(­.+Q BR'hqHS r\)-tzZE EsSn@%€?ISzj7'X5s\-Z 8=T3$~) CE)Yed"iǷb0#5`"gcC"ΗVSZ8L.fp y(!N.\1L{;9;]r>zRoP=c1l?>iGPt|VkLOR 8H@ݹz{R]p*1&<71/>a6!!e~CiK[JP K|%g 6vz8}l 1 /F KJKeA1ե'm^}jzWpKB]~p˻v:vMɱkmY+,Fr2R\ƯJdYz1&-&Z+0Ưiku;X°}Zz{#7^ 3Jމj10m򶹥 Oi.+{]V]@+q_Ank{fJ&ְSK4֜[g ޶ǣ.xAl<ڒ Dvwܫ אkѣ4tT}ɚẗ5#ZKI,aZAR|ȰvCmJ)MP* ƈ MT5ciN((ѲT:0k(kі&gA?(&^"sQ]^RnOi|$d8YxrZQ1 SAr$˓ Γ/F1*'<9QRN/4O:ʓɓl.6b-x6#稠? cO\?>{;$;`?ƏvGQD>i/3P fEM&5b'u[وVrg#Ì~m2ca "֘%r&:MEcUATz,2di9S_#d\Lp6ѣH/cԗJiz<'ٲ@~ME+ܷ5 s A_k f4VUw`п mꬽ\b򂱫r5%˝K0O%%X'L$ $˴\"q&KX*T,BHRr>sj1{qva3m*e걙B!EwwЋǸj/X ;cW{JI,.;cr%5eh@$f(0oH9;ES4LDnP\),#8kY}EODŭttUT`Pcx:5GZֺ`ma-vD%?yӚ%Bot{o&B{ssS_ Y+8qL R|(sΠ3,$/]P ^ y|5QGUlp?xHܫ 1j,pobCr'f1bb~dx|iYxǘ7yTO]\i!cQ,ޮ+C[ZgkkYgR6&Wfn:ξ<"\0f%hQ <--N3ĸK%e%.B5FZ25Βݓ]FO[je)GaؐLL0xVmKwU! aUmIkU{Vd$hlWy5n/e1%TMK:mZ.I_L\&g^v \->^!Lբ墖hѢ$ų1֝3pg:Hxǟ@qB]qxlb!%iA![tū^ $2@xqIJ!}@"T Rr3)؉{2 U΋ݔzIGO kc_$i//#ke Bs7 B^XgGO==L`cC˖,;Uſy@3'{׏yu^cb[N!RkdoRNrY(M8 s ctjq#5|J(شV L%ZM4JJa42qKq&kSLRLI2մRmtƊZ%wp>?()Vn/gVuǒ^J!-dzɷծ/%.tBO@)^uۅ*}(}zS1mw_.DGCUQÆDa*h ,neN\klj [+5eV7{5 7n ǪL8Qg%͉0>l#Ұn!.9G a!pN/L<|hdY0?xGg-/w- -j4dK>}F5[u8EYW>u&qkTjJʶ]-> m/J4beamt[Fptvjm+ (4ktFϾY(pZm5NP#PNjÄ )Jn:$3'<%df;*G58)IxF5!x7Y 6L72vpM=+ɟeX<5Okvrvѳ"}U ^ UTXS#:sE,J|O&BhQ_He$Zf6#YjTj/߻X[Ae穱ncdq;4>y2'6׆?\_k3c 5E: e!$*E%H*% "|jS'  J|D U7 a;n*Lsh s8O̫0}rO& &?l8ajm\YsVLRHl6-IzŶ$JJu,Wf3d[[d[ˍ)>C ̄PZ4h.3# iW4nw`V/`%ށqFN=wN,M8udk1ىQrT+.E'GvXA@6.q fDB$Cxs8J!cq,x?0#%|6OCg8^'$Ớ\+l/g}{Kd;AlbSLgLڷrܘWhRj"]B/Mߝ/E@CyiT~e+xqF B1ݍ}/afk .^^7k“턾+snm~ÂZ yKlp$/ ܾz33˄ ҾZw]ւ;`Nx0ϮO{ Z__oM̬8i7"!L\D$Ļ'lZhB-Ls|~nȓHfXqfu%"߷Ҳy@q(,lk O#K aISlڽvf Y/J3Bde%9'vxȦz ="#'0>ueA8` Hd#=.p 20=X`MM)92tHnQ#W%Dn7#vT6'u:zafڭ3`nIGON9ha$/|oI#k0[ªX}=1K!A@;ؑ,8t[3t:LdA!SBCJ~8тlTorfc3*ޫ7Q Y.Tc,ǩl,Ms/Vf[ jX_=y"\Oݯ27N59 zGu;9_e4 ʺ9ts|񧫩6~53.2>@:KϢ?w|]tTIǍtQ'۲7Bcfp,|Aj}nOAT ڭZCv.G~jZIsoͧ2!0zSu_*J-^pwq+GP2"xN=l|~'U[ U 9\)m[΢0Pkޔ}~pڙR"D>Ahq n_O< E0i^= /4^Ͽ?zsD o?,~4=iֻA8H";+w/0d.z ͥi?q4Ung i]zBecuGZ$z (;NXyC9h 9F+#@,`Z|fL2#_n_c$Pr#53ɿ<ˀ,FcRX?$<D`DXu%|-C/| hUxà<ߌuf2PԔp%?@IvF4yGCo 5iJ1W9|aΈ_ILYʗ$.risBEcqm3_T^}=a4kO dmn5oA5oF\t,譧?Ǒ)X>mݸ)[OFS̱07:Yf (` "bf _6XU4B^]kLqg',%ݸ.Hu1(3t}2m<}?S.6&hYVyCt1E ѶvvZ0z竜 9|V1p  vR3#L= #߀B)F{m\s#`זIKfFf̶F.'\~(8ʔcKQ_>=. +unv^C5C UʢSTs6ˠo#^҅g4Pcp ԇauT}[Iܕ4ۺ!,vY9e㨐sCa*ٶԑ O |WM|ə#o'pp`q( nf5ڽ[sܱ#tr 6]\;C-a7Ȃd*>\]L ؆hH!Qq֗RڶS]Ve,#$fbr(kiaNb%:o0/+4gԊd]70SqE{%}.3Oh,f* )QC3Mh0e$pfp4nؗatO}~QVxP 3txWz"yu=ٲ%:xuЫl3Ȕ i_tt%Io8sۥv( -;2e3'cƹȂfWL/R0re*Gub7?3%_?N-k0#aMNt_Fm|//Ί'M&_3Z)^L[V8V3d',}]ebllΥ{ ̊#T{OS1xaٶ'D!&w8y5Ba!\1rS {Dsa#y)wЗ`skM$K>-^ʭ QA̔_"^`<;&ʶ cv$y={X9=X8ӹ=9rG|eI呰@bvͽ׉:!~v$Iꝰґҝn} %ŨXfIWzPpZa.)B]RԜtGQJ`ǔuIN<8xםJٵݾ#7jat{_,B,ř9W zZ̬w+PzGg9nZ.uw}4g®}wui|*P]|.Q"%Ǩ6I0AtR{ ]qxaZۅ [lMa^/Aީܫk0n+WY%vlf龐S[;L GTw[C4WI{yadɍHJIO8i7NbqvUs-A්{ymmA>(z,8IE]?HfMZ *z$0aV~u7LQxQG3]r@Mn.7O`Դjb/6meFk&N܎-tܛ4dI_ϩGq G1__F{(&~~~}u}y|4ɓחO˷0ݨ4mLyj_ sIë J"}ߌLɹw IGqwDf*%yrd1oQ ­w-c8[9QT؃Ip_ ?h2N[Д~Ay:AIfJG9EgQ~]]5oT ~pY`4G/b01*ꛯ뉂zQ'G!/#gQwߗոtӏT7w/?.xGC2ut?Fퟦ T`%񠗓+'LGZ:c8)j^vv >;+}})ekԦQ7Pm3x{,EXedO-bQ뙎q3KxjEYs{3(`)-HfypKѐ8Jbv &oVafa2&7)ÜO\?#,fQ;XApIe,椔(#&܁Fdu eo;zY~N5&z%G6; Se$Hب/9y!(8 IJz!u\!Ƅ$fs1']CREvI#  @ ǰ@-`,Itp I@Ωx<ͭRQSХ%= F2CDY3EfV>ӫcF}"Sɞ9vHpO !$uhP }^}9AOM)ۗ6r>ZCB{!}]خB B,[{)l :̣XG \fRV 92Ycz8_!lvCzl%X$^8/67%)'`!9f81aOWuj7d > %@`IP PFsC3[̐ðC $7ZAr"r\$M!b0O5NQʃ9 ; \07`0"5P}l4U2}Kn 0` ,AN6grEn'';rӄ8Ll<ݐD\9i=ɚl=.x=~n~y8?cXum<۸¬i.~H. 
dJBs潚{uwڭ)UD;hy!h՛vk<Ѫڭ ELBC֩"uO|+dJ7j-}N{BÕ4P-V1+ uҙ \^pN2-Jdc75qVl5yxa\1YS s {h6\/4EEJ2B3后]X)9O ' wU$RBDf:m{T?jӛ74WY*x"".<+0'{<ly1^YU}vOV=#- *bM֊2n+Uz(?[oél:?]7/`zfSΌj_3z607[8V@BjQyXBưNJ:6RjUg[,xueEU[JWuXS-+)nH* .:zcːx㫇/_bvN"\Qь)Fd |4IfLQ .:=ozh2͹:/.Šޘ֠2_\ c!ϱ'] x;~|l85x}<t<My cym3O19lm<9K.4W}@lPE-WF㛐bgUy'C`%TKۺ4J˓X%cFX  (H@Y-f&`y*f?ڪDMGi]d0O\4FAe" hA0a%S\Vn}86:mƓϽK jšb&PP{Vu;g _XG)V KxA4Z*PVFU.rYSqP -\{N {EP$밶!0`H)4`S k6\ub.GbJWl;g_15l{)RH@۾'YhDd$r@u'3-fz_Nw*Z +4'ehkZ K$rb]UEQ0:Gq݂10% T[4(AǰQD`# #M|LR[z`F1JBT6 J0LbUÚ&&Xi|Y+1^3R Sl>Z-+zvq|n]Ri#gۂuc5RjѾn-*m畷Fg=!X(EՊ,8:2+pV*0LKD"0 4+PD1$e Cv!phsҀ'ʵ5XNT<6F&6bghx1,|MÍSY.e7ߗgټlVq5e;ZE%#KRqH[fQk`mhϔzE`qq!J"eFV`q MȌFt 7-,%腧5ey@JBcEp䚐 נNotD6iDU _Havj4uV(C< LޑFA>C30>ٕЬ4ƶ5+1 <]K1wݚ;N[h#*87!i?E^ cN{iOhq%r{ ev/}IN8 ,msF۫߃OMjw'sIټ/g>d=v}rS܋*bȯ34gzSegVʰBtzg/>0?-7ߏ|v଑ c+_Ξ쭏f1Ŷ `Bl|>E0}$:i#nS,Ij֙_.VdM3͞Ƴt{1:BَtUNVUt:vU{Um+ks'^V&1D2c!񎭜,ywз., Gx|fL[gnaDe*CUY2T{IUH*CT+v,] vNСa SCɣY8;h¨lrIP%򎃆 UTsC4&@]MW"ô1J+t]Z0N?q 2]QkIBP݂_<)&*Q1R{.X q_>X) W܃e=RcY,F Sd_?6cD 3E3Eӽ33E`d*feea N`fYGE<,YW#W?}y5ph9;0 E7H) (КMZ*5E"Mi3 ;!md[6B wɴe \˞+jB;e.YNQɺ.g/4V-MsN ZBֽK;/n\M|=W&#:Cm8^ՖSHb9}BbKr*-b_\x`:pyDNaz-bN'т-t"zɄF[:}7"։Оݿ1C F3of:1!OVlt g%K4o oMV.o߄L7vKQ(d3#[:B=.̠ɌW6aF[nf#eFSE E9)- {<\嬄KVd=5 Usmci'x5䉒ˢ%g35-~:s~^zO:{Iz7x|)PswȽ~oIwlSi$ltF]d6\^pB[-/ݟq wpa2SmWuϱ]O 1h _RqBC+3P%X0ͱ*E؉kk⎤qg*)=w_q sO**UڥI:b'=Z]v=lRm~9)C5{Ua)cfq?\qQ`ʟj@*e~-㩎;A]P?)x&hg<;djw0U5NіGOSh_/8JAOvMY]7a{5-~r -}w`&u6Χڊdn>8Ao?O;^6:񲝓0 l\VS[LcFWsQ< dcWoM5]==޷]#D=4zI*nrBZPG-BEƈw/g2:a Mz cQ@`(a`&\cK)C_Xxԑ@Cܟq(+5Qv iK *a&0}}(Ua]h,QZ手(="xoB&bD|` n(;}A[c:I[ػtmϳNmǻfYnn^)^GDF5QJH`~$qo Z`+X΢4zΔ_h唊n[Rq`otI!;EL""H̔ ,: jo\P]*jn%񖨹b/#:5tE[GA4RbaiIb{*&}Z|^缡y1;˼9؂7Up^0cRfp_9|'/t?g- (tPuwqzC\/SP>R/&0;YmCTY:m?).kY[Y_ds8"e3ɾw2 "s[eӡ].pk.:`{ov^K.c<|4H_OGí~,lB7~~ ռM)T6jޖC5oswWNd,ba5)VfalD?ߠno'yVgiдA*070Iwnü5gmfC/Cp~qcӌ 0={ bYXᚮxf]f~itWah\ک< {E&[tlmUH&A!8.[Ce|`Tz XPT_֊:š6H5mZk`_&r68 -1P^˂ŀ-^y5f7.X gRAt G*#4>=+3q㘲'۹"M>cl,J?i|^ƒ,$<6?~z+2O8´d|ǿ_12(zד0O0?ͧWWAQ R,Io d:[ra4S3ӧ^ V0%9#3 9LQDA;$dSn <Աi&1&ܻURBGx^ 22%$Y7$skFp5)B%}csE qs]ykN‹!9/(!5dE!1_VF!;m$5xy7ŔF3,\+$z-a;qdz!,!iWcz4~3-]Tvm,Y>K@e_ X/ X1;%`UԿDy5K:jHWYxohǐD[`7lzn~Gk ]nCO3z'У1kz~zI`^%Yo"ZmPiєpOj1è]3ǿdžh 1MŁnde񃳥dkm#h#cu̜@Bq'jj1u$M(XV\n^/=R5ZJ2X~Qv3wOC*3bZxl)IwrЇEލ;8t""n_a;5`:ńAt gRIr EszQS4<˜JE((txˮs^S9P) DϏjp_ FD#rZr99)1-!LCŘRH~T_aQuQ1-"8|TӆiDT0J'}!;^ZFH:zAU;?bbqEձUrCevV_q17@f JQ(pͤ\+Au2/eB!©l0zvR{ ͠d("r I?)fA xF*BlI‡e.#V3 ʃU`@Dy!CFcTYES##h]tFr q+a 2ƒ c#vE_Gx*˶ Nij0}3}׉mڻF4z.t sf\Daedbt ;dQ,aRPkmh6&tL4SBm,oYj@H0 kE!6v]@Iy-%` `m9IyO@bBik;u$iC|PoB~.t:0N/ia-RG/UP#^X)~M(sC9扚y{֡InܡOX4m˭[,0vKj??jHh.FB/H)7(ŝU^ Vwi9G{ih˪io)]CN(!ekG|!-'Isa}}# cEWN.铴Si7I᲋ى%WeeEKl$v9mz${r]@ؖlD(k'ꋲ7x'Lδ526YR0R>?D eNibCT)9<"~_7 G[(e\G 2#b:Jj0u~>= m4ItݧzRi/rLLme&Ns~rdMmܜQ+k\'9?ts596&<>8-N)2#!]:-oRuWu|yR|̹wr3+Vʬ9=N<>9¸=/Ye3/8[44v໒߶EB9 yzSRL *)3垏)1$<90 gfuDnɧ]/oޗ=H尻Yk(*wiK]nd2br]pxg_/wj8gwhЈB˩v.Gy7 =1`/y+D\ @QU?oR޴1[!REᜟ[-YVC-j۴^nWDT^A3N2 q/Yrc99gtsDiڶ 5f;7۽`㣺qb+hJzg\9_g Ӊ`T\y?;7^q-eO._ǗBk񅤋-,6G3Xrt| \tjtŪ ǰ1t q^)[0cҥ%- HxY+l!o\4qa D /q4 B}4MS+5{{5_w>BM Wcۚ_LtvX*l7o¶MS &"NpI˄1 3!n,Xi8\Y;b4Jud(K޸w{w!~ZhI0BJV1?u3޷z9/(Լt 럃2vYZV %Йos^> Q)px 84xl Њ&˺_8{{fq\whuVڕ/ϋ{V,!aq:n _8hM O&ѯ#Po ad6nSm cMal0dxWp$5)윑RFl E'=wLp+pF4Dԟso_7|J;*Mt5{a>R_`qb qL.BjQQ\vvE=(.@-wonm-MChA1L(= V!!GusTEς%B窽 5R{M{ K&<@b÷WWWi' 0^t|?^l^W !hSj!x~̴=Rrᣊ-$_'u 1Y\ÁK;\nR51P hfXiU/iK)0%ӌVS\0ɂJYi7WWW,n^p*aGXiQM6+#&ZM# s/GErۻy^a76RxsThP2kʑ&ct4j1Wlm unĿ@G02c y͞%' aCHgv x(pyv}O gVya IĎyc99wP0k$hsa8Zy's sQXBeXv*B^A/d#-QS%|'t>^)!p64AKbDZUƟ!O;gv8y6~r ֧ ?86>'8{v'hJiŴ}C#&eOo'9bLd|ɩ@rFK (5('8it.%a%WTl<F5N,k ZA9-٣~@R?ۈ"}Chp~\FMGp"{k!)+/ 3'2.c-w>?61BuLt 
0uM7D5Z3o tEׂ|yD:~$sJ Ɏ$n8u/w?`}[TVAN:%z:3)SX %8jq`8{lSEV5 ^ś``D|JrFrytgIU t@jB4,k^c<͵O:/V_V4ٖOgioA`/jUK wջϬ{] Eɗj5e?h*-.a\Wn>|AzeuXC/3G^0&C. 5/YѰ-iT@"]@<;&-Yvv*̋ , ?J- {1[}Y[ Q;jrYğcmvW"ͭy~hT yG G~iq-%Z.5*@_@WOVMNjn.rЏ7(V}7=='Jשׂ5fSGiy4]4_̋JmSf'sMM-! <@^Q܎ 7TVݸCX 6&Z,N}+9"̊̾tiMޅrf1ХۭFSNZ]fDfplw-UVS8FʝrXG.1#ؗW7T"N ]25 i'y x+Z߲aCͳߐ|)׽rMԦ粈ڌ# WW13OSqHϛND7@WRnTnoTMKȦlz1=sXE0%*;(K9 X.;LYa\shS6(ԦՉtkv&uE\*1lm΁jۊVo+zmݽ2άxGD*>TҝGTCf*#8W$ሧ\E;2X4ן'>.^{H@dFXNYB") 3Fwh <%Yq]K׬C'8}`|ᰟ>ق` xS_pEUpX]}g_fp¼笒`L#TU'*uЄ[$R tLLXMѹ' ΀cLIJBKSK4ňX\֭c3#') awĮ4xϿ~$(P3~L?]n._}pq>ޔ~"4\lp娏Wz?`;S>\\y! Ϩ|v' ԠfG8硜iEMЅ1.2YkЄ[BH% 8C{QSIE&DP ;QmY/+POlEEBߥfo!)5~PS cz ZRV{3[\j2N1@=U4.R'QZ&P &)#zBd6wC8n!mZtkmxߨKc |GBE?.QΤH%If1RtPe%J71zQcmw:7z,&pHbaʠ\8o: $ . T}=!N|9ӄ!N^ ok6KtQVCS;㼇`Bh!:]1^=~3볺Yzzέt?wmdG͝2*f=YL H<:. H;Gcduc?J^/UIJBx .wϐIb2Q#-crn/엩i&'yay&l.Io.? |r6cB>Z|Ÿx6'In}Ro=}\VlP%$ 4NѤwȢĠ j"pIUؖ"97ALU7$UFG!6{(W{cm ʛ)HV w1QA!H+,_/3$!Db ( h [Q B]%1tsHf{OctSscJjK೔[']1&AH$,zH@AUtS8@ZvÇA8':ypF;0"R[2&TEG|" un 0fiH|s5ڳO0rf/|BLlTx}a~9]ӂ/if(Đ͡ ) *6K$f8XdϢ͞Uyizc/ $ڲSGQݭ~w6pCcQۖwӱ_Pw.UJ|>qlv ruz)8G l\LgC>j^8[ʈ]e˔^ L [O 04%UNT[YeG04h,Q=}- m8ЪIŅ [!B8l$MY&x%l$Exq$vmާjΐi_l>cV a6g4Z>ˊa3ђn!j v{AIdҝCvC zs{(dNhDc݁:09oyl sQJ-gDIfDU-hVH4;Diו^Nk.l.o`Wݽ.S&l5:+ zRY':.vERNFw2 'cwDB>. Lj|kuT$jd јЊ[ HzA2 l ۏ` VAg'a% 4FbDqwDT"s@=1fZA΄[] z\#?r7I') =Ц+Ҡ)ycSo-$^ПifUs2nן03"ڌ6)$>c89'3 GP*rZH䊰!x23J`)D$IGn4x0Id8KI/PqY& 6J5UI1cƒGKYhŒ4 :7udƒasSv *Rus@tQ""^0}X;cJ$5#¡nڂ[ٻ涍lWX|@}Ql'wj2eW̼$ Dr;5$@64JLbK>KY $/]yfGr57KI0%ǼJ|3p)_as|cf̮mr= bJB A\0\k/FJ 2XyD\O:Y(P`Z25.7A^[vS! {ng] PVV OR2Fr@`Z"GTגAmq*Je#_;U 2Cį († ϩbXUu_Bn{ ;+txbb= rN<%A)uiMNĽl7S}"^ )v:)<(J[bNYL E,FXdc*܋3PI G#C3F=Yș }>+#IxHpm#pµDj׎AX5*V즠 ߞ IC(Pi (P*jr]~ |6GصHǷ|~oӈ;>Auv>I ,YOғry."lK&åt?\4;j\i=yL'e,_md/N#q>糇9>dId倣y̓|l8RNTDfw_VD ⼽&+ HEjONE'RBfZfz- ^\r?F1ߞ^nx̨)z-i2e486b"kNCxG8Ww͑d_ysܙP" Ok+c:[}RG%)C)|x[]Qdub  M$e#NhaX$d e,A)6if-u)= v6e.]At%(2}ŐĺëWKI D%i8D~>՜ KlWvmu Db&&r@dbCْz3VR J|RZ"պ9puAϒ5/fxtF^|.?1H&Oڨ=5\kfDo;6{{c_WS RxVN/Qk&]i.uImu=ևxOϛ;&dB#ԑEcѴFBC%;*sΕ5%żo0٨dmkBׂ>;ռڽ{"_CRC`{{QkDǢQ`5q#Р8;AQ< 򊴮A;=1 VOqH+QwQ# _,<B5t"y0/iԿ]횼+^V rzR*$jy_IL3kq3HM͐Dj3 hE!i#a T]+`NKBERN0P(<'}&, x! 2a=k`;fxȶ~F 5a^QưS?ٻGiߺ̀ J\ *#]wOő@~]9nw`+% )$#q{1 ِ)}x ^ OY oXw^H^ P٫9Zth/3S a.L_ŧ >q7CP!+iB20M ϢwcMv߿HLdaYATItno.W[>6\)DQﬕQ&Q8J˨,"[ʬ`vXAUyi0qPks}^(8YhO`'cKdO?CUۏ+uWt<}eT<""H CIxmҠ@^S(],t=EWsؗ+~Ng3nô?}KMth+U_R'#p19g3 B)֬GsZƱ89~98AРR|;x rr]-*֩\z2?19U\x.:]Yz]BG94띧!m u]E1H3D"D-(mc M&Χ~&G`eʓZ|D\Qնz 5hifPvR>kPm!!V֖#Lc t /7(#>!gal@"!::ӷ/0FG5HwaK9YwIcF3bpL(*P*)Ģ[^úh:z ^HWy }Y m;!A5\v"Ӑr=74٦믟.naH{` =n Z9{N筧pz]\5}8רxQδɽn97)I&ukM5o(l>o`RBb~tV_x >ӭ }Ou:*/7VB4 UIriO)uƸQ8Yi.n=]ЪݥȃM[,]9nܔM5Q1YQG͸ʲW_qSNfYNF^O!F9^WU]i.NqY+~M=?? 
}nX@#Nm:Ik_3xV{'L5Ǵ׃%< Qς#VV͵wR;ӏ0vyQ> w#W@7|*/.\g6f<&ND T)P0hK&{x&B{ Kkz>OHfdF2P9؟8u,# J KO83P],?0VXb߁tk6%Td4%1*#!TFA:2V(U%i$YS3A,-%X$Z[Ibeb-bSNx,)IX!%L ܷѝ^ MwVT<1jռۮU+#&mUm, 7$Q)c1`+nb,,ՓƠ+EĠXDBe1.$C5ŋ}늢&0BzQ(9#B v6 WC൦a^7^ ܀FzYfi/IakߑD~b+PMjM QXd QqJ YF m H*̂ VQUU~6F&/1MAX8SB":FL`85j,cZ׀%F=oE/2ˀ rAQ1 1M]&FV]hX\`o 4Wb尜o\^D VZ?O޿[}>w,'l-gXS𜻇!Ku_Bt<_q%ߡiLh·wWS^`E-ćx:Dc~摂*&$8~#D]HHb8_|<{+[(9j~B0 R"<}e7 &;y@3i3^DmI*ZqQ-RI"qT0̚6lIbx!ւ˕(ilqLP#D]mF掩 %m;|6+J| $m@!f\!]Yo#G+C)~0ܞ݁a3kCȳ3jQCR 7HI;YXEddDd͍cD&L(065M?oSzz` 49 k:9Y`[T=Cٚ뾢UȽSR:nr/3l9_$3"z /X3s7a2[.+|>JHz~*?rٸɝMmrgo֑KaT2~z71(^ZA*ޅ%R&MS,@4a~hnYy mY#8S-2*3G 8z4"a#%xdX*wVK@5 b QPq8kKc|<C+%`:*)52bn#4%ikEUU4E6s3l/bxvWʃ۹Q AyRA2g~ڑИO"[?lU厡*^beP1z*^PL5’֒KtP`Yˬk qK6xk,8\㽠 eS;Q#F۶' sS bpD_t%ѱ´M5I˘YiP 3u,xɰL>EH`]d1"vd.G FjAj9H1KKJ֤fH[A˃Tظۼ^}m@c0¤_:%qіixB (Zvqr:g*B.=g_3A]gyt*x/ @%6;5T r!X)*ޮK9+ &0l.YA ٛͣofUryv˃Du{iP%ӱ`:8A4Ϭ%`JHkSsYȞ8yW`\Zpj=2^2(Af0εvt OT<KG`U;C,a6f7O3ƔNOC&+z$]o5b&{# c.1kd7UHhiͳ  ϱShkcK]fv.|Zi){QG950Qan(#,X%\* 3pڥ>S$Xv,z|A''Zh4z j8)&)m2Sx$^B$-hAE6(XxET*QL8|smVX7uc[>yYՍ^N M#hg'Zs4T 8=;3?)Z&AS7d =;Qk0BJgwTm) 13P0Q)b"x&S4ء H|- n];$ew2 :B5.h`G x1Vh8"׊qL #YM$ d^TНl-#q{Wb$k՞&(=P /\~M`X|Y4vIA  ĔƢjo$ :R2l/3HWUp$_LFZ&ǰ|[TWO5'+0F^#2rsn2O^BHfttR1 \lrZ 6X9FwpG&!=HK1Pk?:eSJDsQ"ͻ:1Jux׍ȋ6=w.۴ϋ#nڏg]|Ou x|n5dh|#RY`{ꩧ[V v͞8ޭK{NqqD,TD(ߞwf==Î8F8(_;qϨQMRjs =/J]Η -ƱT-:ԙ1 (em<ӷ'M-r)&c,:ů>UJL 1Qk*/JdT$= n܊ vZR5LK_); (R=QGݑa\fPIzjT UM>AUM(^ T TJRj@@ZӡZ0Ϯic>rA+XBxIKNצmi)nԎH*kjt-=wjƤ-Sxb]f=ʸꔵuOAпۥZ+m\zKϓM$% ]_<؇~?|$~w;f$ċS~>녔JתJ<лts?#лhS♲[ R/3c{;й.w| +$+"UV$UcnN3h)ihJvkCBr-KkEx{2Y2̏70!y ҷm*_ Ѳ2,n}>|V#w~L3h;\~"T<̼,||8cTSwu"µZNW Q"وj3!>P1CNjÖx#Ĕ"w^;ZrŦ /aS ujGU؀[ A\dWTh G |dي+u?_Ujd ܯʘCaN[/t]ՕFk"0Rȟ~CR 103Q+"sܵX].Lr(&mE?|btTa+s""p=$ WߦK1MXs?*bs XM7R6a^~qE* /ާR:G\! \Q=J3VpWP0a_bO|պK{=Zç)nʴQS 2ZNOT;͹Su`?un9M>P= Cw?JĜ,&qA 7k_=-2%ӥpgD?Fl NǏd}* 3_3ߧtvA&zEE7|/wsH+z;^;k}ѺM3|{dRn)|.MQ"BZ} S P<\b0QR6E < [LHImX4\-[oluvp]W}U`/-wu.;B-4>k]KBl`t[<3Q]m_o `\tWlKYvIs|i@A+^5I)~~8G%'-c+De|L:-D2e= B-۟Rm[2YAZc֭bv?r/- %C, Z,Y˒: 5RH˩)k#`j[2Rة D;&qrR5T;re#$k,˹sO'.[<؄=Ѿ/K{ZIjBzlz֛ݼu}u_>F,N]?P5G|M9i\[m48ARy1V1%cP0\p.[9÷Em'?[ Ef.tk] Q z4LiGzHKPfUo1N1aZ"ݽFZ[IAdP siω h-e$( pL 3ˤJ qVS]Ah$ke]R[(^L0J<gkO4*p F1D0Xx&Ӏ1zĈi%W}HVl=M[,a+Ǝ V#6'A%8 GE[2Pr2A}h8%ܚgI}<Z@J_S;ЗS??_4%0xƬE.M:삆auŁ^M/`MV44R#xQ ]Ƌ>? F.,hƍZVF8'4qhF3h1(~Z1pTA7B&媋c9"!+xT]'wy_n<4;JP%ljwUTk,I.O\#x_RfX$]Aa~I'>,w~r?_E3)ztgO?RXS^]䥏oj4fZzϔdsO^[ ϼz\PO(Z(% +kkGBr-)^_Hq&QgInX[' snPK:ϻ$lYH*%ҊV%$23 $ŵ+ Fw._F+zwXNX9ewcFW26Bxӝ -.)+n/lIנ,RS>*}F'}@xYoNzy{ކ|"Z"S˜tÞ(HsHKW i@qD)ArF hˍIJ"uP|t;4VU2:]u&*pg^u_뭞yUnvU޹xr)Gs,dWDKT8i &(0&V:[`PB꽜 HcgΣ 4ḠCJZVD%G4 晢:o`hbp`Od)dJR1 ޙ-tsmSjFvGwa,ۣo),juCkn TIԘX>ʿzg.sNK~Øռ4֡1~,g=-32Tuv",ޕq$Bˮ1<"/zXXc13c^vBdK.fF}f`ECdueqdjNr\ۄ̮PhW(hcz*j, .qF8?\9XmJ+nCQ``ߜFj7lZ`w&뇱k,? 
+ c 8z8Mz+m҇8Q&'7 }1]j5YZk|k0[&ӵSxtk.:wfxm1R^PYh>$\y*sQ-cР0a ( uB_Ϛs&5da{M"H` $*Pc")x H 1QOxaPA 7 'u⅏\/ܼ"`fph#t Lޔn9L,i8F-v<=(I ͖n-pW΢\xz\}[5aQPJxAv~i7ꂩP/Jt_Q !9hlL(^8v{㙅VlvP=k3#DJ;}55-C|-1a=)B qۉ6g _K!(JlQ5Z.S\  ]S u{Hv6O-5.$Ӓ.6=C)g?d |.sn.}fGLśRѽX.,@pыT(D&"u_HCl .u8Z;"Zϝe)lf\NHj!AqR*9:ґy(#e HN(Ag*^Yd8?CjnSh<)fO) 4ݦi8)xbZ|uH:+G1`ua +&s8"9"g:h ̑ ӀZw'yA|Ir*ReR#^BB Fh!1YԖJ$^.x6Z+hhRsU\Ǜ7{Fc}o'x(S#ol;[k]\]4[QUI/S8m'1'-{O%l60\7ո@0]Gll[ZjOԼLt^T{=Xє#;wm!F׼pS>_k`j ;u᳔Z ^;Gl,&(SZ&Ov.Pơ' ~FV='k}ixe|}sG3b=;GecEu(XX"i X߲ eSyoH uuЉ)KxRAx'-h# 2LJXt8.eE$Ahʿ__.kK\/?ЇOS^*C")m*кVqR<@#Xٖ`^ Jy08Y?0l53I-ie3"XA߱dbDE 姎vOn__:DK'sUcMhA-p?60%R0g~9Ezr𧬇B?gA]~ڈ/}ϳzqZ>엜^q*_S0$4Lb叽ݞz䌱I|BIΪxs9rC/<a"9宜}-˚ǷA)s;KuEc>L З{Ydk-K0'X)!n9[#8J#Wz֓N b1[PwK;=j-ffuu[m=^*n@u/]瑯u{vkPENwIDrդhr?we>_y֡}|hY񯃿`m끳F>K%>.A0١9=  oC,"Z3N'^_cY>cCq\ηtѢrd4J:4$+1˂ڜ@Z6RJ}8E '28S."}ZE{iLӽ-:,L:dS9.NyY=~Y/_y:psq@[o> ogc!gfE5.h͗;@zfN/ h\NҲ\ޓ4#k.Lviٌ`]e̽HU{+k/|ɶcՂJ: /\ovq,-+Zd35 %楻"RփyQ1[hu\XCd#5٢,|w-*fzqr}zͤ*ZU8h( d,k}Wz*rpu?v ehˋdk[p"xbpQV_|XbtސB8u& c s>74_8"8 %GC80NO78Tσ09[g #𣤉 60q㜌bRoHH<#7<)Iˣ"J&L*JԉH܆XNsb(@Ѧqe"5$_JA81Pd#8~kA4&"8Nokf "JxЄ&&:qA4դzt1a2PdBcqPCLx i,ɠA5L)XP:L!2ǨO z7[${<_s64V9*;4?m7APSǸph̓(τ QhJ,Meh0kɎ|\ogN_C-"N=]B +xr25Q_/ PN7 RfvqSyf+P.wyL(w~I 'ZRH .Xh`SNjtnbu\9OoXs!?GO2Gը 4 G\Ǔ!f?-V>]``Ee`ޗv9Xro7( L%rRb_v4m|{?],Msꦈ\ڭ"InnT{[>+Y9<f\;9[@D Dz45hN"u UBhDPƼr9!!fXD1䌌69?()8t MhkJZZ#CYʞy4]$ylz{G9bWi@ "ZuFFY$Z#]K󨠸NQ0"4`{6y]>zq~t%3\ܖ-s[!srpYnO&?igY;W)0ct _+ ɀȮM aR}<)ߒZ =UزSΜKOBmEQYht"@p|N%(W42x]t4qB_^0bEP5RJ Ҡͬht6ͻ덝EɣJdJ`T./ㅡP92 @C*pM{tT,MWQXJlaT<$ eZř λt\ܼ"kqLapڑf6'pS0œ(ʛ?B3bwU Ȱf1ށegh:bG-XAQKl _mbc= >][ZCmBR:W8J [[wa0o$>GC!'? 4NGIi)R"85#3(iqFOչH1ɆDJĔBz)RF Ks3Ll冕o!a%x{!.!zpG`?;ܺtvHU!BU裻*m~mI|DDgW~1RhA!2=AC dDwB@pJf۳Ȇ6*/tdSmv#FU(8xY89a4 ==` ZX .=g CB!# ׳/(kvswUfT*>~.5[Grv0+x=LhE'L"!ddt_o lJ֑ F U֜DaǓEKAq)Z*$p0SAFcAc9#!H[gx3sLuc$-7H`BA-kš̚6R_ FX &bj0Bq)V݂jגh78[H$VᾅtEWM0JE:·IBzH^G[r\r'.6X֘}Yкr|y;f.6nښә#^|ll\޼s^cL~(U!NSأN>+z5qqrL *cDiAt5;?L|UI7\30^=S-$l`.)mAo bk]q0.'h/\,'O+쳄 A]TS!柠WvPB؀|H"aÒvAdkE8ېwM1  .12 Bf{k3{S}ngߟ}o!rGZWzzס"e<~uFuc!͝wm 1N,E+A7Mj}1HFZTIXIy*%nٰ`^& L|o8!xvN}@ 1a)^xoW7/~ 6sY%F@?;f /^Dr7_E{.Ufχ^U^V>!K8=6e`2xPĒ9HP6 >1,z4_vFcr_RJteއ5| ;F Y#l667W6@Lz"&xz,;pߚLJwUpሉջ^T?i^oh/כg&޻_T!\B)2WC Fea*L= k`@ >t"{ߎ1y#+Ҝy#791%o_4J$B_!mNS'h65ek(R*6{>%HL@81j\#p'Ba40hpEQ*sY(&@q7CI5iE КۛC&沑 p9qKA*~$(@NI)颓5wrSPѵ:藵oͪr%p)  =vzX |L'!tio]ۻ/=[ y&:ݦ}aTѢ]F !iMJ/g4=,p*wM*N$H8: ~8QpNpf]pזs>]])t E)>$Ȅ=5ES!hXuURR`FlX/YڴUĤ^ ϡs#rB:wT1R zVxΌh q RCNseh^ ؝u P vnJ@9(iE@aPcfb69NXq]5 e]<(֒֟\0 EP"RR_f3qRo֖aKx2&yRP N} 0.|C#K|!?صkgcƮ6'B6Cq $uhGSqc?a%D5AiZa*dM("؇O7^)!IpLu[A*dha-lt%BnL 6RKJnW9ֵ( 0&!ť; A@N#Sݜuyt8/bup>%rI'_Λq`~T,!| ]> ÖJ^C'[%"0.Q6Ԙ{w64_>y=!P`HL8i~nbQyE(1pN 7O~k ž k11ph2mceOe)Yo5iIJ‘:2qPT5~;Ǟ J*/tdZ=$t\-ei+9B NDmHc 0AIVA}P+*$nɭz!8CG~|\q{ ^Bt)[YQ@t4Y19Zt;b0+&'uc/+8̎8z9zxCީA~D\lnͬcBwT7~xX+syPj+T"Ri:ra9 {9Zm޼~7@4mhja_жX)6L+Sjp! ;#-6͍lGpFZ )L;qtX{>I`Ȇbc:f;Kzr!0ґz4t!!BZӒq<mV1ҭޢ~ _(1*8+kW{GѬڷ]4.3{HFb6pnB\Ͽ~,V|z: ŗW2 4h4hY<\vG#XMN:0Ac&?%px4&^:8&V *Fq@pRFlv23+P [U (3M+kc`0ľk [o6-¦eڐд  īG<!5G *tM`T)BU @7q풗Һ1PVӖ$]x"@k  %HC4pE}" mGCc|Q`g6J@j1"k1nk a)ӴX&Ŵ=!#;)|!tx} ;f F?NK͑ σ(Ó ZxT<|lnoT]V^ 5 i a~z*ݼ[V$<_ۣVk?عop]߲d@i2/@5?bJȇBWq^2ҐيՇi^xO=y)^%pVdv;Bw J1#3oޝճp"DNSDztif6vjrubL盒Lh0WЊr'PPbKm1 5V b]Q*ikte>[wIH DCgP'Bk®J}}q~VϳM5]пK#8#P Πs0*_\X^v.Sš? 
xΠcp2Y ᨤDVyB%1,m% SwyxBU(L&Chk!VU\hL}u MDB*,ţ o s¤wҘ6`[K7vw ,8[e Fe?`vr__7FC1 1+&>-&P:#Y+15E!/%]_S('.˛mCRYvD$}\nv=Tr8MXG',چ9<>~A/c)[~RR[m#=^_1՛ݶaq%b 荻7GrU ӕtt _7m%r]zw4g벙g]uѵ=?8ɺgMg_)mfSY" y&eSӬwà}B>#Ļcس-z-pnS 3׼; v)#Gs Ξi0 IA0;ڏ2,}Ӯ*|ߚz?PV{4u|ЛM~B@䣭ԍ{cu$Fz% )LÚ]AkY,wr[.:R[~fyFɐc?/OhX$IB/3Àn<aO<5<-EX"%ɢ:H0ږ̊"2ѽ؅[HmvD{me ~Y ~,F/~Khpi}((Z ƶѹ~$({Ө~0%P:3Mc/5^>[VCZ6:֞q%13m5"K͎N[moClD&[| xP8>`._^a*ۂ^X혬s?W70h !Riz}uiQ6^S40i 6HwB*< @?/N_zjM4I83-u4R:B jt8i R e:DFV)z X}sn*62&ҎA911Hs- !yiB ZSCq̶e\M&~V|xnrȿ#J_fzQ;?8ua |'S\K.x%R/xϮHJrD㕊. jv@)AQiMNt?z>Wp#~7oQUwVO>dyPv-X˚_TW\MԊ\d4(3Fy@cokN&r`u&@h<!ag̞g;x@5%GXCdݷG*,UaM>4NbQ&3oW!&x=3>T~/'aV/Vx=o/\P*pCr$<.( +{yEwaǟ=DP'A noߟNS¦ 0#W2oPK5~J՟ !?\ۇKs-6ж~/ߞܼ#eœc`%-Lϥ>0'_Mwonc3W7oԤv$J)^@#p˕51F͢ pΥy̠7-KNP]pѹ=-k(`c慴s.!}Y&_ǰiJ?䔇urp_?fgWb苼܋.>x7c/WKfey,qeׯ-dB2z7s\r$(uz_َT6L8/o,S Dj50ϥ:*HmÓL̈mf>gs`i bLqnNk>X-ߨd4Ԇ>&Cڼ'X| 3)MieΛ1]S;5eO?u`c,G$hYȘ3O?pexE\gbt^]ݠXڐ%^\8zidW3sՔqS'R`)q= LyQTpjٸU,eVx]\H-}H\.X -a4:u F:$Ck guR-=']P_;vT^I[E$)1I$ 2"!DCE -eYꅴ`vdMu{7JW2Sjވ5՜"1)GTȵ  DSjØ 6ijZ }X{Dw5AI G& +npiI`1`)/Zz% g/Fsէ9٧h.a4P=S|x/=fU|8_%cCsF`Sh ݿ>-fP/1r2 3XU2W31KLL+e+PR"rtp6Ak }pWx ѠlnZkʹs_xEۤezb^wv ⿮B|:A M +`E` U;3hb 2J.>YY,@6>ghNŤ gl䲿@@#=x)_\nV$*dT dBs^9j;i2lثi+R UTA$Nh!2$<AAQ W>PB;~;תJʯ >q .єjA Jsh,2/00T")FRI䈚C/' S%1%(:,j4x ǂA"zTlv4[kK">AYD++t$ I$ \kup98b1IJ-b:gtzTsMjv"9FRG8"9F8a \ӆ1,_҄h3dY׭|sLU.<*UzТ”: S Z1()Υx Scä2õY-$3“ǝ R^*L-/dBr2ߙ4zBwAT*VU@i;*~vUA`i\yYD}dRWkf{yk%Jl7kʆ0o-:q܆-gAG~H !ξ4žWR +Lj2!FW)w{s' 3Κm\ղ[;X6o- [ok>xWֶ2D +֓'(v3y,E}241c y?*+GB.mz߶0Q@0u7}?7~F߽m{X ||t==5&-3t2쐧f[!&^R(;*ZͶ EnOj%-޸}x@|Oq5߸lx!>t. 7~Մܮi{J}|GHd8p }yE0oL}˝Ewy=H,a]~01՚# ~W|t, 2?@^V. y+O9@S3)ta!v cPPG—|y2L{yU+}rnr\AɶC fg˭!بgNTξstPx_( s:EcZd`0AG0p u8L<Э}uFvtN=LAhVsxk}^&"<0.bעCB%j-(\, MZ5AS v cWoIW0-\Ws-u0?} Lrf;Uߏߝ1O=(&X^B~i(yF%V?~˜ JYV:o,fgO=ajCMyAS-_~S}xz{dn$pxl-=߅'C𤇵l2IvLhm':PǙQi4aq:-] CsA~7BQdٹĊF{ wunp)ŒjhuVaՇCu8yi[IL\s4}.OyTR MeJfJMnKãUiUuѹ-WO(VOT~Twm?@&;*٘8Jfw?Ɋ6 nMSAf`$\լȺp3 mlۓ"*U+}cFDUa5iFDzrEEp7<^ՈQ$G +k&Pg=#c4mS }jlj8.w&Pӈ_(K @i:o*#ûSI[6d-\ZaG2?|֏.df q}sٛߖQꛬ [X)K2V߂h^8 $ 1]H>'f/ z^ǫ:Zkwt9p;Sto.X>"('bo}/jk/SaѴ-' RDd%{N 1VV0hH =9j3@sgH6}v#+Z I_Yqnd>JœyVE+Q:IZϸ x& ne 29s;)VS/J 6)qTuQbDRz-ZضsB8U"4 DuzCSEr?FFIA{FFAZKs¤вBID'WGZ @āgLz.A)R5:{#鄖dtmhDR=dŇU7{ R$'`4)P-k=,<  ۬peVHr!7SZH^*v1*FSժB6N9" +">AGݯmػm$We[pH;[ (8ٞL+u{jڢZi`03vSz#Y%Fvҽ?ip.KFV&(Ϙ\!9%kPOeMv'XZC/*]^gB?ކsoʇVYdae\ *#0S#JH]u`WiZapS3|!VBW2p 4 |,Tp }Q Kv]9>Mwf 5ajY 4j:rsW$fW{WW67ۏ?N 9ܺ:a0ܕ\ygMyɜŎ96Ԫ,Jh7v*IcX;IX;ƚqL7~#qdQyWoCO^{ BVZ#Bq/ ,',-t= >L2Ƌ^qܰO2ݲ$qo4C'xC92bdar6n3BbHt%`tX>TE2 |iGl(sݝ`ס[nV*sj!'%&Jܶ!%_lg"Gc_Շt&Ai D/PڱX5-q&uRfڈ΀&kǐH`RU1O_Ԓ[3dꗈ?r '_{;a}.G_||> ZGfd>|Nhv~v䁵q 9VʲG u,Jg+f>y[RxWnGԬuU_cf()e0#x'!` ٓ;ax%Z5FAtB0`ӝP|7v\[|PPJqV)'=0q"HPk) +p^X@KQ{Q;hmh\ J/.`nNJJbkZD`p =8i3Fϥp@:ٚ)1xއs_ÉLTYW?z_z4_O >E` A@(wBƕ4\;Ji[" ooHeWk"wo\) >er62zXιx~xٓ?Hb8c` BY8%YVؠX@:Dh,CFTIA+* / E-PU:V ~i8bSK #؜`Yw*4+**d&>Ns r:U&)t VThHvAKK`Et%.`ZՇY`*1n󗞕DO(&TnyW^j-Kkj8itPCDe'Iq(5Ʊ@>("#N$MmE?;DhRh):ѡ[ZEPLƱj àzaGoyl|?5-G.Bz&K?.|+$qA\tvW]w6Oi4Sƣ<]wp믳E^{}+Y̆٣ygm؁lv;ᐭ;_<\T~_b> swݾeTu""pr /O _NY;Ybz³z4K0/n‘ ]<|wY=/FB `$mJN0n {=IA)ig0n` "Lu󈶲{=Ad'idSvwQ'YviS#9ÈRMQ>;()Pxnড় !d'tdFvVn '{ j E+E_BjeA~@@`RF O0?I@m3G!2yP#*[Axws/WoZH#qfZkz19> ,Qo]=»2>_-cG@C\?rs Ky녏?~01"pt 9DS[\s]25j=6GHad'g_bcA9djVzͫsmGjfĿmc'[NfVB?K9+Pcҕ_LdooEOX>"+A X^POc&X݃%~h2\w-]!VCqqM"w]ߒ}OۑL.( 0<@ 4?%&V[w}ys:X(DsN(*n<ʡR4X^#8nr鹭lJv`tݱ"QĠXe^i *T0iB,c>0+ K݁Iڜqx@0h@82QֵNGW2B/dh#:V*?~:4|Vn~k>}A5Ƈo_WÖ\pȲn_ j]4njuRD:Y΂r;o?MFf9M_F/ Z}{p_տ#5ժ@}蔯7N ѓ)O2]$sYokPApycR.Ӆq{a\Z#G?i8ba5MY $z}*5ݒ+Z?.^s= z0e5 ϙN|{INFOv2OqU,?HjCpTqtP2GW'wɘsp}4ǰz\p& Ycs(k$p>LN(/{'G!;E=$ -+ZXf $UpE` A@(DQWp ^BmY_n}jT?{k|bjxqq]OOwbݏ~S3=}4 
b>t*&eѾ"UxUbeR+^P$UtpY*_Y\ZUz'\ƀADphTؙlR_N %\qNJYMJ"u&]fcDB4y¸  J 9% }XT sEzcyJe#^ ?HN ;!Ei3F Qo[Ǩ &F;؋:H8p[vIU n!PukG)[/z˄\OE[6kXl{¨}~MQx΂F9˥aLTU iob8|/jzs?7O 7)pG @bxzH"Zp5g(zP4Q Y[z;CA(cʲjFr$| xư[\x#6F{ +84OUH}`(-2-5Li8{؊Ć\y@Pkt/S:Ԥ//4Ų^kx/:˶}MgX!=zqwA59|ͿܞsٷETaݠzx &(nPl(wzq. ft n"DUہ6#xe vc,arBk"2nWL?Ŏ1E͇Lo_1O-" 7{=el|`b'Uk=.أ=´;`Tip̕:ϔ& %9%;(C(Jя=oXNE6]Wpy6 a<n12_s4 mWof $M:ŢҊ +ÿa2{JגӤ-,񖊘v phU',RjX,x@Y-M@(-IuiOjfQ# ;AN;̉H!N@ZtIĺ0<-h倇i'&J N[7u9ÀIg2ZPae^)-=3.nJ$zG)Pǧ=!3 iBE20:EJ{; 9Ũ7t9d5 _o.׆v‹2Nxra2GI*/YxSA Si# Z*^H l)WJY/)ēZ$.!WwTJ![D9ʍ!cy~V7m>d }u\.?rȀr@T2qk"rؖC_o=J(Pb=DA6Kaq1A22˧ftTBTRE]SQ֥sR^K{'wS%l)b^'՛ʱS&bTom~%I$e#8pHlm$$mˣŹbCi P΅`|fx~\{}GK;4=́ 1{#ksn+ POxy7#)5N-ƪjrX?7#ߏ-fn~=z1quNC u7I߫? b/h9e\d L^C TJ+õR'(ԠR,B𚴀1zZ!x<(Z-hIRQ2 az 7BeG']{;9o+XfҎJZtf톑[AaKҧO__֢[踒EFSryTRSo-۔ZQ9Þ6"RϽCUeɲ ڇͻ- l%NADp ~=- YEA5$z\VoYiJ `ǓݕcIxǟ*Q5أ!2d٤A5H9))J.<#^jT AJhSɾDSfv⊲;Tbiyqo𧢐Bŗij/my8.}qFy)> rI /.Y_.F:Z93,] ^<1_րhs*up Bǒ?-$pw:"dŭެB{ `?;W.XD/nK/f7[xrj :dB\J}I1 p4K jJitUl8:j3#s8_ L \rrUy.p0YmGH3LRrv(xxA睔o"J3+kc Fڻ|@mT= i".i ؟g}?6'!,jGnGgIn9; `q*Xj$BHVhZ |eCgl`,壽%2pP:ʉ-'C e9,IG9Ӟ͒.܈"iQQhY+)͗J0`{`< vhZ;0[Z[) Lq{J5[y#UE$F,j:xZP;eAUE7<>xŞ⡁Z K1$B<5gY*,'~,|µ݌0JØboɥ S q(/ ֩C(^d+$rHCr%N!=w`AnAip`㯕JױJpFW1 UR4]!QcI @);.z'8q^·F4BRr6 }#3bbĄST2FZ%,_ڼpIxPkCo PG]'j:T#P''v6X8bxv(ȀX%KBJDŽh,lTjk-SǐR:8QY.!tڻCͩGFZI tkR7B\ )^-d82j7vC^<5͏9&BSjڔLN IBJ?9[TY,+ f́w7PCHn׭&uY0JUCϳ%=c@.\(`Fn:jh?&!vߌ15GLL>ۅ//qd?ecU=.q7r߱<6Up; %~:90-qL6/TΖݯr]4OaWn(av 侣v)+7e74uv뢇ymߧ(IEe9VUJ *brINZNRr~$R%p)A^3>%gfI&Iq! ԢH\N![X''EWvI9evJNd둉9K~Lbi;N*Ii]ī*"Qӌ @B;!Ҁ(Lі- عa9.~}/q:BG[Cr{`CSgK*@-^ mݾIPs؆TYY#m,^h)s%]JB rkDb H*  լHgs!Q+TpX̪i I^3 Gu'ED>5fp*G2' ]/\ jIȃJ]OϹ[v"DHNϏ߱1OsFu{Ea,D,Qf߫~-퉝ݤ|ӻ3(Ƨk|'7SRrV5n|?yStClrL*齭Dpv$8d;]R$1`6CDKE@x@Ye (7֊dKu;Y%%qa/jn>!sB:$";LiDTBCVЄ%idc{.x5pQ^RO k qS[엷.ر1@p;j4dzd?Ǵj>/6R,YZр0>ZU8^BWb w񇝘~2"N'e@#Z ?h, *5^yi؊T@v>TZhRQ!G9az^j>4cE :#+'m v$ %oP]93FAҙi#ks -BL$FB1sD 8Ll9 >ˢ߆! C, 8zYZ!tk7 U{35. E f'B7UTkar&NSb i #Fs-aGظlRthJE |rx {`k0 (ъBn*m%ρ,v *^VBUB42< 1>(pǸ kBVy*z 1ˤB:f-5 j;nPl>4bvi -&GLK~9,94L"C΂OZeꓜ&T(.aY)<$FPEXc̈! =*sy!O>B }+pĐ ]9V` MBjkkn7ES[ 5Tl*^MM 8][Z\:mQ)E)ͦ׍FH /2w8|zx$nJ ^l4&DBY$VLsGũ µ61hIl*7Ks;(:)'cӓ8,5UoxN};ϯJ,!~X7ݛ+2",Z-__~jGNp}=xs~JkBxn6->s'6 )ӓ7 JzU}<3*r652Pz䔁3ƪ u tG~s5~sp+}ҹ}H-_+Y{Eݓ%<\u@Qgu|UFB*ۃjg)7GJ L (pnGOɩZd9&)^'RJ U)PKom+šYnиy,]tEO>8a'n^.?1xMi(y>WHu+ Ӕ2cdi*"IV8I1q,Gu&8mfi!7@̙:wokPmՍfo'",|)!:NkH|}lUla7,:Cc{]ͯxRWfIP~-MVh<3hW3HA}˷^M&P1tT\T]g@ T1 y26t܆hNTX'? Fr>I5۲f^rNJW(n_/7>׶Ll^&QZEm+2qT̝@@Op08 Y3٣n ZQtSK /%r5ZMQ0%M;+t4} IN(:!u䲒3ͱo;q׀`N%zO2_M@m)^?:ĩ} \)>o&E$A,$խ1b) Ӄ؂ݕ6Jyһ=o:Ax1b7Ҹ7f[*lQb+d\}o;3΀{3%ӟm|:[#C!3F9|D65osN~[.߯y?O2K_?'p )J{ɣ(sv>ߦ"2]4h^k+4G>W"zbp]M}ͭ$zlzxC070kYV &c;Q閣BZs&QzruE+QN) xtpV04 }~K 75&G @kp o/!ß{EDtH~ޞsn;#l}7߳7Z9r A? 
Feb 02 22:34:03 crc systemd[1]: Starting Kubernetes Kubelet...
Feb 02 22:34:03 crc restorecon[4691]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c968,c969 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 
02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 22:34:03 
crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 22:34:03 
crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized 
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 
22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 22:34:03 crc 
restorecon[4691]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:03 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:03 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc 
restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 22:34:04 crc restorecon[4691]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 02 22:34:04 crc restorecon[4691]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 02 22:34:04 crc restorecon[4691]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Feb 02 22:34:04 crc kubenswrapper[4755]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Feb 02 22:34:04 crc kubenswrapper[4755]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Feb 02 22:34:04 crc kubenswrapper[4755]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Feb 02 22:34:04 crc kubenswrapper[4755]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
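The restorecon pass above compares each file's on-disk SELinux label against the policy default and leaves it alone because it was customized by the admin (here, by the kubelet). Each label has the form user:role:type:level; for container files the level is an MCS pair such as s0:c7,c13, and two containers with disjoint category sets cannot touch each other's files. A minimal Go sketch of that four-field split (an illustrative helper, not part of restorecon):

package main

import (
    "fmt"
    "strings"
)

// selinuxContext holds the user:role:type:level fields of an SELinux
// label. For container files the level is an MCS pair such as
// "s0:c7,c13"; disjoint category sets isolate containers from each other.
type selinuxContext struct {
    User, Role, Type, Level string
}

// parseContext splits a label like
// "system_u:object_r:container_file_t:s0:c7,c13" into its four fields.
// Only the first three colons delimit fields; everything after the
// third colon, including the category list, belongs to the MCS level.
func parseContext(s string) (selinuxContext, error) {
    parts := strings.SplitN(s, ":", 4)
    if len(parts) != 4 {
        return selinuxContext{}, fmt.Errorf("malformed context %q", s)
    }
    return selinuxContext{parts[0], parts[1], parts[2], parts[3]}, nil
}

func main() {
    ctx, err := parseContext("system_u:object_r:container_file_t:s0:c7,c13")
    if err != nil {
        panic(err)
    }
    fmt.Printf("type=%s level=%s\n", ctx.Type, ctx.Level)
    // prints: type=container_file_t level=s0:c7,c13
}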
Feb 02 22:34:04 crc kubenswrapper[4755]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Feb 02 22:34:04 crc kubenswrapper[4755]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.776512 4755 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785530 4755 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785573 4755 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785583 4755 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785593 4755 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785603 4755 feature_gate.go:330] unrecognized feature gate: PlatformOperators Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785613 4755 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785622 4755 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785631 4755 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785639 4755 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785648 4755 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785656 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785665 4755 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785674 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785682 4755 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785690 4755 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785699 4755 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785707 4755 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785764 4755 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
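Every "Flag --X has been deprecated" record above points at the same remedy: set the value in the file named by --config (here /etc/kubernetes/kubelet.conf), which holds a KubeletConfiguration object. The mapping below from the flags warned about in this log to kubelet.config.k8s.io/v1beta1 field names is a rough, best-effort sketch and should be checked against the Kubernetes docs for the release actually running:

package main

import "fmt"

// deprecatedToConfigField sketches how the deprecated flags warned
// about above correspond to KubeletConfiguration fields. Field names
// are assumed from the upstream v1beta1 config API; verify before use.
var deprecatedToConfigField = map[string]string{
    "--container-runtime-endpoint":     "containerRuntimeEndpoint",
    "--volume-plugin-dir":              "volumePluginDir",
    "--register-with-taints":           "registerWithTaints",
    "--system-reserved":                "systemReserved",
    "--minimum-container-ttl-duration": "(none; use evictionHard / evictionSoft instead, per the warning above)",
    "--pod-infra-container-image":      "(none; the sandbox image comes from the CRI runtime, per the warning above)",
}

func main() {
    for flag, field := range deprecatedToConfigField {
        fmt.Printf("%-36s -> %s\n", flag, field)
    }
}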
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785775 4755 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785784 4755 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785792 4755 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785801 4755 feature_gate.go:330] unrecognized feature gate: Example Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785810 4755 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785819 4755 feature_gate.go:330] unrecognized feature gate: PinnedImages Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785827 4755 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785836 4755 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785844 4755 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785853 4755 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785861 4755 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785870 4755 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785878 4755 feature_gate.go:330] unrecognized feature gate: GatewayAPI Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785887 4755 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785895 4755 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785907 4755 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785916 4755 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785925 4755 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785933 4755 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785942 4755 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785952 4755 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785960 4755 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785968 4755 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785977 4755 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785985 4755 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.785994 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Feb 02 
22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786002 4755 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786010 4755 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786018 4755 feature_gate.go:330] unrecognized feature gate: NewOLM Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786027 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786036 4755 feature_gate.go:330] unrecognized feature gate: SignatureStores Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786045 4755 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786057 4755 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786068 4755 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786078 4755 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786087 4755 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786097 4755 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786106 4755 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786115 4755 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786126 4755 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786135 4755 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786143 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786152 4755 feature_gate.go:330] unrecognized feature gate: OVNObservability Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786160 4755 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786169 4755 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786177 4755 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786186 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786195 4755 feature_gate.go:330] unrecognized feature gate: InsightsConfig Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786203 4755 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786212 4755 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786220 4755 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786232 4755 feature_gate.go:353] Setting GA feature gate 
DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.786242 4755 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788284 4755 flags.go:64] FLAG: --address="0.0.0.0" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788308 4755 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788328 4755 flags.go:64] FLAG: --anonymous-auth="true" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788341 4755 flags.go:64] FLAG: --application-metrics-count-limit="100" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788354 4755 flags.go:64] FLAG: --authentication-token-webhook="false" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788364 4755 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788377 4755 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788389 4755 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788399 4755 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788409 4755 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788420 4755 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788430 4755 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788440 4755 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788450 4755 flags.go:64] FLAG: --cgroup-root="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788460 4755 flags.go:64] FLAG: --cgroups-per-qos="true" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788470 4755 flags.go:64] FLAG: --client-ca-file="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788483 4755 flags.go:64] FLAG: --cloud-config="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788494 4755 flags.go:64] FLAG: --cloud-provider="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788506 4755 flags.go:64] FLAG: --cluster-dns="[]" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788531 4755 flags.go:64] FLAG: --cluster-domain="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788543 4755 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788556 4755 flags.go:64] FLAG: --config-dir="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788568 4755 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788581 4755 flags.go:64] FLAG: --container-log-max-files="5" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788596 4755 flags.go:64] FLAG: --container-log-max-size="10Mi" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788608 4755 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788621 4755 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788634 
4755 flags.go:64] FLAG: --containerd-namespace="k8s.io" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788646 4755 flags.go:64] FLAG: --contention-profiling="false" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788659 4755 flags.go:64] FLAG: --cpu-cfs-quota="true" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788675 4755 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788689 4755 flags.go:64] FLAG: --cpu-manager-policy="none" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788701 4755 flags.go:64] FLAG: --cpu-manager-policy-options="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788716 4755 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788759 4755 flags.go:64] FLAG: --enable-controller-attach-detach="true" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788771 4755 flags.go:64] FLAG: --enable-debugging-handlers="true" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788781 4755 flags.go:64] FLAG: --enable-load-reader="false" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788790 4755 flags.go:64] FLAG: --enable-server="true" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788801 4755 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788814 4755 flags.go:64] FLAG: --event-burst="100" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788825 4755 flags.go:64] FLAG: --event-qps="50" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788835 4755 flags.go:64] FLAG: --event-storage-age-limit="default=0" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788844 4755 flags.go:64] FLAG: --event-storage-event-limit="default=0" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788854 4755 flags.go:64] FLAG: --eviction-hard="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788866 4755 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788876 4755 flags.go:64] FLAG: --eviction-minimum-reclaim="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788886 4755 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788896 4755 flags.go:64] FLAG: --eviction-soft="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788905 4755 flags.go:64] FLAG: --eviction-soft-grace-period="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788915 4755 flags.go:64] FLAG: --exit-on-lock-contention="false" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788925 4755 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788935 4755 flags.go:64] FLAG: --experimental-mounter-path="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788945 4755 flags.go:64] FLAG: --fail-cgroupv1="false" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788954 4755 flags.go:64] FLAG: --fail-swap-on="true" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788964 4755 flags.go:64] FLAG: --feature-gates="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788975 4755 flags.go:64] FLAG: --file-check-frequency="20s" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788986 4755 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.788996 4755 flags.go:64] 
FLAG: --hairpin-mode="promiscuous-bridge" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789006 4755 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789016 4755 flags.go:64] FLAG: --healthz-port="10248" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789026 4755 flags.go:64] FLAG: --help="false" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789036 4755 flags.go:64] FLAG: --hostname-override="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789046 4755 flags.go:64] FLAG: --housekeeping-interval="10s" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789058 4755 flags.go:64] FLAG: --http-check-frequency="20s" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789068 4755 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789078 4755 flags.go:64] FLAG: --image-credential-provider-config="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789098 4755 flags.go:64] FLAG: --image-gc-high-threshold="85" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789108 4755 flags.go:64] FLAG: --image-gc-low-threshold="80" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789117 4755 flags.go:64] FLAG: --image-service-endpoint="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789127 4755 flags.go:64] FLAG: --kernel-memcg-notification="false" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789137 4755 flags.go:64] FLAG: --kube-api-burst="100" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789147 4755 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789157 4755 flags.go:64] FLAG: --kube-api-qps="50" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789174 4755 flags.go:64] FLAG: --kube-reserved="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789184 4755 flags.go:64] FLAG: --kube-reserved-cgroup="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789194 4755 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789207 4755 flags.go:64] FLAG: --kubelet-cgroups="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789218 4755 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789228 4755 flags.go:64] FLAG: --lock-file="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789238 4755 flags.go:64] FLAG: --log-cadvisor-usage="false" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789249 4755 flags.go:64] FLAG: --log-flush-frequency="5s" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789259 4755 flags.go:64] FLAG: --log-json-info-buffer-size="0" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789273 4755 flags.go:64] FLAG: --log-json-split-stream="false" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789283 4755 flags.go:64] FLAG: --log-text-info-buffer-size="0" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789292 4755 flags.go:64] FLAG: --log-text-split-stream="false" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789302 4755 flags.go:64] FLAG: --logging-format="text" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789311 4755 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789322 4755 flags.go:64] FLAG: 
--make-iptables-util-chains="true" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789332 4755 flags.go:64] FLAG: --manifest-url="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789341 4755 flags.go:64] FLAG: --manifest-url-header="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789353 4755 flags.go:64] FLAG: --max-housekeeping-interval="15s" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789363 4755 flags.go:64] FLAG: --max-open-files="1000000" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789376 4755 flags.go:64] FLAG: --max-pods="110" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789386 4755 flags.go:64] FLAG: --maximum-dead-containers="-1" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789396 4755 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789406 4755 flags.go:64] FLAG: --memory-manager-policy="None" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789416 4755 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789426 4755 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789436 4755 flags.go:64] FLAG: --node-ip="192.168.126.11" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789446 4755 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789469 4755 flags.go:64] FLAG: --node-status-max-images="50" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789479 4755 flags.go:64] FLAG: --node-status-update-frequency="10s" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789502 4755 flags.go:64] FLAG: --oom-score-adj="-999" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789512 4755 flags.go:64] FLAG: --pod-cidr="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789522 4755 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789535 4755 flags.go:64] FLAG: --pod-manifest-path="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789544 4755 flags.go:64] FLAG: --pod-max-pids="-1" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789554 4755 flags.go:64] FLAG: --pods-per-core="0" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789564 4755 flags.go:64] FLAG: --port="10250" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789574 4755 flags.go:64] FLAG: --protect-kernel-defaults="false" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789583 4755 flags.go:64] FLAG: --provider-id="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789593 4755 flags.go:64] FLAG: --qos-reserved="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789603 4755 flags.go:64] FLAG: --read-only-port="10255" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789613 4755 flags.go:64] FLAG: --register-node="true" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789622 4755 flags.go:64] FLAG: --register-schedulable="true" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789631 4755 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789647 4755 flags.go:64] FLAG: 
--registry-burst="10" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789657 4755 flags.go:64] FLAG: --registry-qps="5" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789668 4755 flags.go:64] FLAG: --reserved-cpus="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789680 4755 flags.go:64] FLAG: --reserved-memory="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789695 4755 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789708 4755 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789720 4755 flags.go:64] FLAG: --rotate-certificates="false" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789766 4755 flags.go:64] FLAG: --rotate-server-certificates="false" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789779 4755 flags.go:64] FLAG: --runonce="false" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789791 4755 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789803 4755 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789815 4755 flags.go:64] FLAG: --seccomp-default="false" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789828 4755 flags.go:64] FLAG: --serialize-image-pulls="true" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789840 4755 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789853 4755 flags.go:64] FLAG: --storage-driver-db="cadvisor" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789865 4755 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789876 4755 flags.go:64] FLAG: --storage-driver-password="root" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789889 4755 flags.go:64] FLAG: --storage-driver-secure="false" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789900 4755 flags.go:64] FLAG: --storage-driver-table="stats" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789912 4755 flags.go:64] FLAG: --storage-driver-user="root" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789923 4755 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789936 4755 flags.go:64] FLAG: --sync-frequency="1m0s" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789969 4755 flags.go:64] FLAG: --system-cgroups="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.789981 4755 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.790001 4755 flags.go:64] FLAG: --system-reserved-cgroup="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.790013 4755 flags.go:64] FLAG: --tls-cert-file="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.790025 4755 flags.go:64] FLAG: --tls-cipher-suites="[]" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.790039 4755 flags.go:64] FLAG: --tls-min-version="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.790052 4755 flags.go:64] FLAG: --tls-private-key-file="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.790063 4755 flags.go:64] FLAG: --topology-manager-policy="none" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.790075 4755 flags.go:64] FLAG: --topology-manager-policy-options="" 
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.790087 4755 flags.go:64] FLAG: --topology-manager-scope="container" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.790099 4755 flags.go:64] FLAG: --v="2" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.790116 4755 flags.go:64] FLAG: --version="false" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.790129 4755 flags.go:64] FLAG: --vmodule="" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.790140 4755 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.790150 4755 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790385 4755 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790399 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790410 4755 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790420 4755 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790430 4755 feature_gate.go:330] unrecognized feature gate: PinnedImages Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790438 4755 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790447 4755 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790486 4755 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790495 4755 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790503 4755 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790512 4755 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790523 4755 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
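The long run of flags.go:64 records above is the kubelet echoing every parsed command-line flag at startup, defaults included, so the effective flag state can be reconstructed from the log alone. The same pattern with the standard library's flag package looks roughly like this (a sketch in the spirit of the kubelet's flag dump, not its actual code):

package main

import (
    "flag"
    "log"
)

func main() {
    // A couple of stand-in flags; the real kubelet registers hundreds.
    flag.String("node-ip", "", "node IP address")
    flag.Int("max-pods", 110, "maximum pods per node")
    flag.Parse()

    // VisitAll walks every registered flag, set or not, in lexical
    // order, which is why the dump above runs alphabetically from
    // --address through --volume-stats-agg-period and doubles as a
    // record of the defaults in effect.
    flag.VisitAll(func(f *flag.Flag) {
        log.Printf("FLAG: --%s=%q", f.Name, f.Value.String())
    })
}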
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790533 4755 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790542 4755 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790551 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790561 4755 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790569 4755 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790578 4755 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790587 4755 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790596 4755 feature_gate.go:330] unrecognized feature gate: SignatureStores Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790604 4755 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790626 4755 feature_gate.go:330] unrecognized feature gate: NewOLM Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790635 4755 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790644 4755 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790652 4755 feature_gate.go:330] unrecognized feature gate: GatewayAPI Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790661 4755 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790669 4755 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790681 4755 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790690 4755 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790699 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790708 4755 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790718 4755 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790764 4755 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790775 4755 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790784 4755 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790793 4755 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790801 4755 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790809 4755 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790818 4755 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790827 4755 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790835 4755 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790843 4755 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790852 4755 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790860 4755 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790869 4755 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790877 4755 feature_gate.go:330] unrecognized feature gate: PlatformOperators Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790885 4755 feature_gate.go:330] unrecognized feature gate: InsightsConfig Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790894 4755 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790902 4755 feature_gate.go:330] unrecognized feature gate: OVNObservability Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790911 4755 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790922 4755 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790932 4755 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790942 4755 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790951 4755 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790960 4755 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790969 4755 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790977 4755 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.790999 4755 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.791007 4755 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.791016 4755 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.791024 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.791033 4755 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.791041 4755 feature_gate.go:330] unrecognized feature gate: Example
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.791050 4755 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.791059 4755 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.791067 4755 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.791076 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.791084 4755 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.791092 4755 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.791101 4755 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.791109 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.792373 4755 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.805017 4755 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.805065 4755 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805187 4755 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805201 4755 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805210 4755 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805220 4755 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805230 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805238 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805246 4755 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805254 4755 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805261 4755 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805271 4755 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805279 4755 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805287 4755 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805297 4755 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805305 4755 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805313 4755 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805321 4755 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805328 4755 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805336 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805344 4755 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805352 4755 feature_gate.go:330] unrecognized feature gate: Example
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805359 4755 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805367 4755 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805374 4755 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805385 4755 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805395 4755 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805404 4755 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805412 4755 feature_gate.go:330] unrecognized feature gate: NewOLM Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805420 4755 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805428 4755 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805436 4755 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805443 4755 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805452 4755 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805460 4755 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805468 4755 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805477 4755 feature_gate.go:330] unrecognized feature gate: InsightsConfig Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805487 4755 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805501 4755 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805511 4755 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805519 4755 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805529 4755 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805537 4755 feature_gate.go:330] unrecognized feature gate: OVNObservability
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805546 4755 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805554 4755 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805562 4755 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805570 4755 feature_gate.go:330] unrecognized feature gate: SignatureStores
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805578 4755 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805585 4755 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805593 4755 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805600 4755 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805608 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805615 4755 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805623 4755 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805630 4755 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805638 4755 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805646 4755 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805653 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805661 4755 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805668 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805676 4755 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805683 4755 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805692 4755 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805699 4755 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805707 4755 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805716 4755 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805723 4755 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805754 4755 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805762 4755 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805769 4755 feature_gate.go:330] unrecognized feature gate: PinnedImages
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805777 4755 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805787 4755 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.805798 4755 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.805812 4755 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808052 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808210 4755 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808221 4755 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808230 4755 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808240 4755 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808250 4755 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808258 4755 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808266 4755 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808287 4755 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808296 4755 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808305 4755 feature_gate.go:330] unrecognized feature gate: Example
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808317 4755 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808325 4755 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808333 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808342 4755 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808349 4755 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808358 4755 feature_gate.go:330] unrecognized feature gate: SignatureStores
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808367 4755 feature_gate.go:330] unrecognized feature gate: OVNObservability
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808375 4755 feature_gate.go:330] unrecognized feature gate: NewOLM
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808384 4755 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808392 4755 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808407 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808416 4755 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808424 4755 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808432 4755 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808544 4755 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808557 4755 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808901 4755 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808915 4755 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808925 4755 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808933 4755 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808942 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808950 4755 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808960 4755 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808968 4755 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808981 4755 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.808992 4755 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809001 4755 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809010 4755 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809019 4755 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809028 4755 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809037 4755 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809046 4755 feature_gate.go:330] unrecognized feature gate: PinnedImages Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809054 4755 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809062 4755 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809071 4755 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809079 4755 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809087 4755 feature_gate.go:330] unrecognized feature gate: InsightsConfig Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809098 4755 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809108 4755 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809116 4755 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809125 4755 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809133 4755 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809140 4755 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809148 4755 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809155 4755 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809166 4755 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809174 4755 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809181 4755 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809191 4755 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809198 4755 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809207 4755 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809216 4755 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809224 4755 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809231 4755 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809241 4755 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809249 4755 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809257 4755 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809269 4755 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809279 4755 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.809289 4755 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.809304 4755 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.809671 4755 server.go:940] "Client rotation is on, will bootstrap in background"
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.816089 4755 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.816251 4755 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.818895 4755 server.go:997] "Starting client certificate rotation"
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.818957 4755 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.819224 4755 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-24 15:07:29.780646099 +0000 UTC
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.819406 4755 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.844064 4755 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Feb 02 22:34:04 crc kubenswrapper[4755]: E0202 22:34:04.847535 4755 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.36:6443: connect: connection refused" logger="UnhandledError"
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.850582 4755 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.865538 4755 log.go:25] "Validated CRI v1 runtime API"
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.906817 4755 log.go:25] "Validated CRI v1 image API"
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.909255 4755 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.916203 4755 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-02-02-22-29-47-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.916252 4755 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.949369 4755 manager.go:217] Machine: {Timestamp:2026-02-02 22:34:04.946166897 +0000 UTC m=+0.637387263 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e BootID:2104f2f4-bdbb-4460-aabd-cf6d1f96bb63 Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:31:4e:18 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:31:4e:18 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:31:53:01 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:80:df:56 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:75:21:e0 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:42:97:5f Speed:-1 Mtu:1496} {Name:eth10 MacAddress:f2:5e:77:d1:bc:08 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:1a:c5:05:96:87:e7 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.949831 4755 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.950143 4755 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.953784 4755 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.954105 4755 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.954167 4755 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.954528 4755 topology_manager.go:138] "Creating topology manager with none policy" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.954569 4755 container_manager_linux.go:303] "Creating device plugin manager" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.955322 4755 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.955378 4755 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.955801 4755 state_mem.go:36] "Initialized new in-memory state store" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.955934 4755 server.go:1245] "Using root directory" path="/var/lib/kubelet" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.963159 4755 kubelet.go:418] "Attempting to sync node with API server" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.963198 4755 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" 
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.963248 4755 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.963273 4755 kubelet.go:324] "Adding apiserver pod source" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.963291 4755 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.969192 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.36:6443: connect: connection refused Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.969186 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.36:6443: connect: connection refused Feb 02 22:34:04 crc kubenswrapper[4755]: E0202 22:34:04.969309 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.36:6443: connect: connection refused" logger="UnhandledError" Feb 02 22:34:04 crc kubenswrapper[4755]: E0202 22:34:04.969343 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.36:6443: connect: connection refused" logger="UnhandledError" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.969560 4755 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.970909 4755 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.972581 4755 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.975639 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.975683 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.975698 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.975712 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.975763 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.975778 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.975793 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.975815 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.975832 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.975845 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.975865 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.975878 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.975950 4755 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.976650 4755 server.go:1280] "Started kubelet" Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.977130 4755 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.977193 4755 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.978959 4755 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.36:6443: connect: connection refused Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.979210 4755 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Feb 02 22:34:04 crc systemd[1]: Started Kubernetes Kubelet. 
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.982818 4755 server.go:460] "Adding debug handlers to kubelet server"
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.983260 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.983387 4755 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.984605 4755 volume_manager.go:287] "The desired_state_of_world populator starts"
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.984637 4755 volume_manager.go:289] "Starting Kubelet Volume Manager"
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.983496 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 08:56:47.588631193 +0000 UTC
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.984694 4755 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Feb 02 22:34:04 crc kubenswrapper[4755]: E0202 22:34:04.985304 4755 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 02 22:34:04 crc kubenswrapper[4755]: W0202 22:34:04.985525 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.36:6443: connect: connection refused
Feb 02 22:34:04 crc kubenswrapper[4755]: E0202 22:34:04.985625 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.36:6443: connect: connection refused" logger="UnhandledError"
Feb 02 22:34:04 crc kubenswrapper[4755]: E0202 22:34:04.985485 4755 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.36:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.18908ec869d261ad default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-02 22:34:04.976603565 +0000 UTC m=+0.667823931,LastTimestamp:2026-02-02 22:34:04.976603565 +0000 UTC m=+0.667823931,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Feb 02 22:34:04 crc kubenswrapper[4755]: E0202 22:34:04.988590 4755 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.36:6443: connect: connection refused" interval="200ms"
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.993578 4755 factory.go:55] Registering systemd factory
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.993639 4755 factory.go:221] Registration of the systemd container factory successfully
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.994691 4755 factory.go:153] Registering CRI-O factory
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.994795 4755 factory.go:221] Registration of the crio container factory successfully
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.994955 4755 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.995010 4755 factory.go:103] Registering Raw factory
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.995137 4755 manager.go:1196] Started watching for new ooms in manager
Feb 02 22:34:04 crc kubenswrapper[4755]: I0202 22:34:04.998525 4755 manager.go:319] Starting recovery of all containers
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.006392 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007042 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007100 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007122 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007142 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007192 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007220 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007271 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007294 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007313 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007365 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007386 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007481 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007505 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007553 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007572 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007591 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007645 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007664 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007682 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007748 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007772 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007826 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007848 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007868 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007918 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007940 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.007961 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.008020 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.008119 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.008137 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.008193 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.008250 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.008308 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.008327 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.008379 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.008407 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.008430 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.008480 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.011874 4755 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.011978 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.012005 4755 reconstruct.go:130] "Volume is
marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.012060 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.012082 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.012104 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.012254 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.012281 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.012466 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.012628 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.012655 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.012904 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.012941 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.012999 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.013029 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.013192 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.013218 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.013453 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.013485 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.013509 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.013655 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.013675 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.013816 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.013844 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.013862 4755 reconstruct.go:130] "Volume is marked as uncertain and added into 
the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.014004 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.014026 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.014045 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.014191 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.014211 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.014358 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.014385 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.014407 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.014547 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.014590 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.014764 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.014790 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.014809 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.015184 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.015211 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.015272 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.015295 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.015315 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.015456 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.015482 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.015506 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.015652 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" 
volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.015673 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.015853 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.015878 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.015898 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.016219 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.016242 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.016262 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.016406 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.016955 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.016979 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.017029 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" 
volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.017049 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.017070 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.018402 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.018472 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.018501 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.018523 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.018556 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.018601 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.018641 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.018681 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.018705 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.018754 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.018779 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.018809 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.018832 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.018855 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.018879 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.018901 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.018924 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.018944 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.018970 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.018993 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019013 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019035 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019054 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019076 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019096 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019117 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019227 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019246 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019267 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019293 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019314 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" 
volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019337 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019358 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019380 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019404 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019427 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019450 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019472 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019493 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019514 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019535 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019556 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" 
volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019580 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019602 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019622 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019645 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019665 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019688 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019710 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019754 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019778 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019799 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019819 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" 
volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019840 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019860 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019881 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019903 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019931 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019953 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019974 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.019993 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020015 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020037 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020057 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020083 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020104 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020128 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020150 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020169 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020190 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020212 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020232 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020253 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020273 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020293 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" 
volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020313 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020334 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020356 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020376 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020396 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020416 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020438 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020459 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020480 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020502 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020524 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020544 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020563 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020585 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020604 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020624 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.020659 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.022784 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.022827 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.022859 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.022880 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.022901 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.022920 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.022948 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.022971 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.022991 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.023011 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.023031 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.023052 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.023071 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.023095 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.023116 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.023137 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" 
volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.023156 4755 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.023177 4755 reconstruct.go:97] "Volume reconstruction finished" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.023192 4755 reconciler.go:26] "Reconciler: start to sync state" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.023835 4755 manager.go:324] Recovery completed Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.034770 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.037946 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.038011 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.038027 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.039314 4755 cpu_manager.go:225] "Starting CPU manager" policy="none" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.039336 4755 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.039363 4755 state_mem.go:36] "Initialized new in-memory state store" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.054482 4755 policy_none.go:49] "None policy: Start" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.055895 4755 memory_manager.go:170] "Starting memorymanager" policy="None" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.055953 4755 state_mem.go:35] "Initializing new in-memory state store" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.065088 4755 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.067467 4755 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.067539 4755 status_manager.go:217] "Starting to sync pod status with apiserver" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.067580 4755 kubelet.go:2335] "Starting kubelet main sync loop" Feb 02 22:34:05 crc kubenswrapper[4755]: E0202 22:34:05.067659 4755 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Feb 02 22:34:05 crc kubenswrapper[4755]: W0202 22:34:05.068636 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.36:6443: connect: connection refused Feb 02 22:34:05 crc kubenswrapper[4755]: E0202 22:34:05.068824 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.36:6443: connect: connection refused" logger="UnhandledError" Feb 02 22:34:05 crc kubenswrapper[4755]: E0202 22:34:05.086189 4755 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.130224 4755 manager.go:334] "Starting Device Plugin manager" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.130300 4755 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.130318 4755 server.go:79] "Starting device plugin registration server" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.130954 4755 eviction_manager.go:189] "Eviction manager: starting control loop" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.130978 4755 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.131248 4755 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.131361 4755 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.131374 4755 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Feb 02 22:34:05 crc kubenswrapper[4755]: E0202 22:34:05.140091 4755 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.168297 4755 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.168463 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.169972 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.170040 4755 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.170056 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.170298 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.170787 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.170881 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.171425 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.171457 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.171469 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.171664 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.172011 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.172082 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.172173 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.172219 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.172239 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.172563 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.172631 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.172657 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.173032 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.173158 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.173221 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.174546 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.174593 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.174606 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.175080 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.175114 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.175127 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.175602 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.175683 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.175698 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.175946 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.176095 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.176140 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.177803 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.177828 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.177840 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.177854 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.177854 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.177876 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.178074 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.178104 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.178861 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.178899 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.178925 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:05 crc kubenswrapper[4755]: E0202 22:34:05.190024 4755 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.36:6443: connect: connection refused" interval="400ms" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.226586 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.226635 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.226666 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.226712 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.226777 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.226823 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.226849 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.226952 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.227015 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.227067 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.227109 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.227153 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.227189 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.227238 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.227270 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.232928 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:05 crc 
kubenswrapper[4755]: I0202 22:34:05.234615 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.234671 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.234760 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.234806 4755 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 22:34:05 crc kubenswrapper[4755]: E0202 22:34:05.235634 4755 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.36:6443: connect: connection refused" node="crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.329204 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.329296 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.329331 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.329372 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.329421 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.329504 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.329538 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.329566 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.329580 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.329554 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.329629 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.329656 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.329496 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.329719 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.329791 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.329811 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.329708 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.329846 4755 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.329841 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.329907 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.329956 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.329946 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.330011 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.330024 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.330128 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.330150 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.330167 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " 
pod="openshift-etcd/etcd-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.330219 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.330308 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.330362 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.436635 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.439193 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.439253 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.439268 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.439312 4755 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 22:34:05 crc kubenswrapper[4755]: E0202 22:34:05.440189 4755 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.36:6443: connect: connection refused" node="crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.501098 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.508119 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.536551 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: W0202 22:34:05.560639 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-241a6d442229dbdc51e3b91d8b4411e0c138bd44ab4bc8e7457863785a7b4007 WatchSource:0}: Error finding container 241a6d442229dbdc51e3b91d8b4411e0c138bd44ab4bc8e7457863785a7b4007: Status 404 returned error can't find the container with id 241a6d442229dbdc51e3b91d8b4411e0c138bd44ab4bc8e7457863785a7b4007 Feb 02 22:34:05 crc kubenswrapper[4755]: W0202 22:34:05.562964 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-3218f125fc95dacadac29c14db20c1f27dd24cd0166f774649950ee55cd9433c WatchSource:0}: Error finding container 3218f125fc95dacadac29c14db20c1f27dd24cd0166f774649950ee55cd9433c: Status 404 returned error can't find the container with id 3218f125fc95dacadac29c14db20c1f27dd24cd0166f774649950ee55cd9433c Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.565083 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: W0202 22:34:05.575788 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-3e88117becf1df0095b0a6d993c3c6301dbb8e9fc9929dcdb9c66328aef7345f WatchSource:0}: Error finding container 3e88117becf1df0095b0a6d993c3c6301dbb8e9fc9929dcdb9c66328aef7345f: Status 404 returned error can't find the container with id 3e88117becf1df0095b0a6d993c3c6301dbb8e9fc9929dcdb9c66328aef7345f Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.578014 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 22:34:05 crc kubenswrapper[4755]: W0202 22:34:05.587854 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-7eeef810863811714104558f0e297d61fd9f2c566180d6830c5878eaa8d0dcce WatchSource:0}: Error finding container 7eeef810863811714104558f0e297d61fd9f2c566180d6830c5878eaa8d0dcce: Status 404 returned error can't find the container with id 7eeef810863811714104558f0e297d61fd9f2c566180d6830c5878eaa8d0dcce Feb 02 22:34:05 crc kubenswrapper[4755]: E0202 22:34:05.591086 4755 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.36:6443: connect: connection refused" interval="800ms" Feb 02 22:34:05 crc kubenswrapper[4755]: W0202 22:34:05.611637 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-6f97eb359a4d2010a0529784bc06b19884d53541a4eea324e0573ca083ac8856 WatchSource:0}: Error finding container 6f97eb359a4d2010a0529784bc06b19884d53541a4eea324e0573ca083ac8856: Status 404 returned error can't find the container with id 6f97eb359a4d2010a0529784bc06b19884d53541a4eea324e0573ca083ac8856 Feb 02 22:34:05 crc kubenswrapper[4755]: W0202 22:34:05.773475 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.36:6443: connect: connection refused Feb 02 22:34:05 crc kubenswrapper[4755]: E0202 22:34:05.773616 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.36:6443: connect: connection refused" logger="UnhandledError" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.840720 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.843481 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.843551 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.843571 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.843614 4755 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 22:34:05 crc kubenswrapper[4755]: E0202 22:34:05.844265 4755 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.36:6443: connect: connection refused" node="crc" Feb 02 22:34:05 crc kubenswrapper[4755]: W0202 22:34:05.878313 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get 
"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.36:6443: connect: connection refused Feb 02 22:34:05 crc kubenswrapper[4755]: E0202 22:34:05.878478 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.36:6443: connect: connection refused" logger="UnhandledError" Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.979968 4755 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.36:6443: connect: connection refused Feb 02 22:34:05 crc kubenswrapper[4755]: I0202 22:34:05.985264 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 02:13:03.555045887 +0000 UTC Feb 02 22:34:06 crc kubenswrapper[4755]: I0202 22:34:06.073013 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"3218f125fc95dacadac29c14db20c1f27dd24cd0166f774649950ee55cd9433c"} Feb 02 22:34:06 crc kubenswrapper[4755]: I0202 22:34:06.074140 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"6f97eb359a4d2010a0529784bc06b19884d53541a4eea324e0573ca083ac8856"} Feb 02 22:34:06 crc kubenswrapper[4755]: I0202 22:34:06.076427 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7eeef810863811714104558f0e297d61fd9f2c566180d6830c5878eaa8d0dcce"} Feb 02 22:34:06 crc kubenswrapper[4755]: I0202 22:34:06.077634 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"3e88117becf1df0095b0a6d993c3c6301dbb8e9fc9929dcdb9c66328aef7345f"} Feb 02 22:34:06 crc kubenswrapper[4755]: I0202 22:34:06.078856 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"241a6d442229dbdc51e3b91d8b4411e0c138bd44ab4bc8e7457863785a7b4007"} Feb 02 22:34:06 crc kubenswrapper[4755]: W0202 22:34:06.151541 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.36:6443: connect: connection refused Feb 02 22:34:06 crc kubenswrapper[4755]: E0202 22:34:06.151704 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.36:6443: connect: connection refused" logger="UnhandledError" Feb 02 22:34:06 crc kubenswrapper[4755]: E0202 22:34:06.206658 4755 event.go:368] "Unable to write event (may retry after 
sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.36:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.18908ec869d261ad default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-02 22:34:04.976603565 +0000 UTC m=+0.667823931,LastTimestamp:2026-02-02 22:34:04.976603565 +0000 UTC m=+0.667823931,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 02 22:34:06 crc kubenswrapper[4755]: W0202 22:34:06.281444 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.36:6443: connect: connection refused Feb 02 22:34:06 crc kubenswrapper[4755]: E0202 22:34:06.281603 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.36:6443: connect: connection refused" logger="UnhandledError" Feb 02 22:34:06 crc kubenswrapper[4755]: E0202 22:34:06.392802 4755 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.36:6443: connect: connection refused" interval="1.6s" Feb 02 22:34:06 crc kubenswrapper[4755]: I0202 22:34:06.645170 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:06 crc kubenswrapper[4755]: I0202 22:34:06.647154 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:06 crc kubenswrapper[4755]: I0202 22:34:06.647209 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:06 crc kubenswrapper[4755]: I0202 22:34:06.647229 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:06 crc kubenswrapper[4755]: I0202 22:34:06.647266 4755 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 22:34:06 crc kubenswrapper[4755]: E0202 22:34:06.648310 4755 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.36:6443: connect: connection refused" node="crc" Feb 02 22:34:06 crc kubenswrapper[4755]: I0202 22:34:06.980233 4755 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.36:6443: connect: connection refused Feb 02 22:34:06 crc kubenswrapper[4755]: I0202 22:34:06.985467 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 22:04:58.233623929 +0000 UTC Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.008334 4755 certificate_manager.go:356] 
kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Feb 02 22:34:07 crc kubenswrapper[4755]: E0202 22:34:07.009720 4755 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.36:6443: connect: connection refused" logger="UnhandledError" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.085657 4755 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba" exitCode=0 Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.085818 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba"} Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.085868 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.087413 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.087500 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.087525 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.093900 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1425a2a642241d16e817fda7f2a55a013acaec2360829dfb6924cfeeedc64353"} Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.093964 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9694c0a516a5f4a861c187f5d6e4dfcea44ae4f115df58b5ca0624ee1de36b89"} Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.093991 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"41fb68c18483e366d0f8e4ed84ce3e76e834958e80aef9781350fdbd16e483cf"} Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.094016 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9e1be4ef7a84da3c4efc5096837fa048c01dbacc0266c47237c05e92e9c93400"} Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.093932 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.099272 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.099325 4755 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.099343 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.101532 4755 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="0ed2afacfc1a34e9d494391eadcc0a87dbad93eac80ffce87bec1711295a9628" exitCode=0 Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.101673 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.101688 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"0ed2afacfc1a34e9d494391eadcc0a87dbad93eac80ffce87bec1711295a9628"} Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.103317 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.103357 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.103369 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.105014 4755 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc" exitCode=0 Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.105074 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc"} Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.105206 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.106578 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.106598 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.106610 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.109008 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.109538 4755 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="53dc6e93a1cf31d4ee15672f5a77cb4492586fdd71bf9950b79d82668f1fa483" exitCode=0 Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.109585 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"53dc6e93a1cf31d4ee15672f5a77cb4492586fdd71bf9950b79d82668f1fa483"} Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.109681 4755 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.111007 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.111061 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.111080 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.112622 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.112656 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.112671 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:07 crc kubenswrapper[4755]: W0202 22:34:07.784587 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.36:6443: connect: connection refused Feb 02 22:34:07 crc kubenswrapper[4755]: E0202 22:34:07.784664 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.36:6443: connect: connection refused" logger="UnhandledError" Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.979990 4755 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.36:6443: connect: connection refused Feb 02 22:34:07 crc kubenswrapper[4755]: I0202 22:34:07.986103 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 22:51:43.653513324 +0000 UTC Feb 02 22:34:07 crc kubenswrapper[4755]: E0202 22:34:07.993889 4755 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.36:6443: connect: connection refused" interval="3.2s" Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.117453 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800"} Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.117527 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310"} Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.117547 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d"} Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.117567 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81"} Feb 02 22:34:08 crc kubenswrapper[4755]: W0202 22:34:08.120418 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.36:6443: connect: connection refused Feb 02 22:34:08 crc kubenswrapper[4755]: E0202 22:34:08.120509 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.36:6443: connect: connection refused" logger="UnhandledError" Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.121805 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.121791 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"f27cf58110e22dee25be7cc3bc69eb426ed482cc82dabe8fc748e22863444b0c"} Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.135386 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.135434 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.135446 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.138099 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"88e31bc33ec540bf5097cb85062e7681f619e80680c68169fd7a10c5086c3f13"} Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.138133 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"2e11caaf9b5dd96ed1cf9718112971ce1e059fbde5ce0bd7e43c326515db990d"} Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.138147 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"1537c30f4102532e8b02d31e92eb43916f7810a8b082d2b564c45d2c269fe8c0"} Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.138200 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.139559 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.139585 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.139597 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.141366 4755 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="e4f179dd9cc4a49844e311007239f81543c5794f7eaddc14b51d7af1fddf4bc5" exitCode=0 Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.141473 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.141530 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.141893 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"e4f179dd9cc4a49844e311007239f81543c5794f7eaddc14b51d7af1fddf4bc5"} Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.142179 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.142201 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.142212 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.142648 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.142680 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.142698 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:08 crc kubenswrapper[4755]: W0202 22:34:08.175972 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.36:6443: connect: connection refused Feb 02 22:34:08 crc kubenswrapper[4755]: E0202 22:34:08.176053 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.36:6443: connect: connection refused" logger="UnhandledError" Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.248987 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.250062 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.250098 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:08 crc 
kubenswrapper[4755]: I0202 22:34:08.250110 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.250132 4755 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 22:34:08 crc kubenswrapper[4755]: E0202 22:34:08.250878 4755 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.36:6443: connect: connection refused" node="crc" Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.744833 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 22:34:08 crc kubenswrapper[4755]: I0202 22:34:08.986771 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 00:35:47.862933895 +0000 UTC Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.152489 4755 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="92156f347a994b8e6f6d3d8869df116d7602e075e3daaa74e3f474acfe1cdc8b" exitCode=0 Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.152583 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"92156f347a994b8e6f6d3d8869df116d7602e075e3daaa74e3f474acfe1cdc8b"} Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.152828 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.154249 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.154289 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.154302 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.158207 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"641d71ea8d0c8570531c8d62f128f46fc2b2a80956b3500eaf5e71f85da8cfe7"} Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.158282 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.158364 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.158286 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.158515 4755 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.158624 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.159665 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:09 crc kubenswrapper[4755]: 
I0202 22:34:09.159708 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.159767 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.160904 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.160954 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.160907 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.160973 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.161011 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.161034 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.160963 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.161092 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.161108 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:09 crc kubenswrapper[4755]: I0202 22:34:09.987647 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 12:40:47.409960496 +0000 UTC Feb 02 22:34:10 crc kubenswrapper[4755]: I0202 22:34:10.164714 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"1f1df36f4af3e78deed01e503cee2b711aeb8242980bf71c300f05d2f034b3cd"} Feb 02 22:34:10 crc kubenswrapper[4755]: I0202 22:34:10.164795 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"1447928c6f43b110d91d3c20ceeb22e2da4d3bf51454c2e1677d2dfa53606c0e"} Feb 02 22:34:10 crc kubenswrapper[4755]: I0202 22:34:10.164819 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"af7b2c6cec947fa14b2f589c8f311a78698c25f189ea2c82246386ae3cd1dd97"} Feb 02 22:34:10 crc kubenswrapper[4755]: I0202 22:34:10.164838 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2cb9641d1f1887abf886a1750d9aebf81943612ccf121c530fe5070dd18d0c8d"} Feb 02 22:34:10 crc kubenswrapper[4755]: I0202 22:34:10.164883 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:10 crc kubenswrapper[4755]: I0202 22:34:10.164955 4755 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:34:10 crc kubenswrapper[4755]: I0202 22:34:10.166148 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:10 crc kubenswrapper[4755]: I0202 22:34:10.166211 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:10 crc kubenswrapper[4755]: I0202 22:34:10.166236 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:10 crc kubenswrapper[4755]: I0202 22:34:10.988640 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 21:46:01.471477088 +0000 UTC Feb 02 22:34:11 crc kubenswrapper[4755]: I0202 22:34:11.175436 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"4445fbe44078c84156efd3cbf0e1840323cba0e967f2c485f5ce4b2e9327f176"} Feb 02 22:34:11 crc kubenswrapper[4755]: I0202 22:34:11.175535 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:11 crc kubenswrapper[4755]: I0202 22:34:11.175588 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:11 crc kubenswrapper[4755]: I0202 22:34:11.176842 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:11 crc kubenswrapper[4755]: I0202 22:34:11.176896 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:11 crc kubenswrapper[4755]: I0202 22:34:11.176921 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:11 crc kubenswrapper[4755]: I0202 22:34:11.176957 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:11 crc kubenswrapper[4755]: I0202 22:34:11.176993 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:11 crc kubenswrapper[4755]: I0202 22:34:11.177015 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:11 crc kubenswrapper[4755]: I0202 22:34:11.298952 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:34:11 crc kubenswrapper[4755]: I0202 22:34:11.383569 4755 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Feb 02 22:34:11 crc kubenswrapper[4755]: I0202 22:34:11.452006 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:11 crc kubenswrapper[4755]: I0202 22:34:11.454069 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:11 crc kubenswrapper[4755]: I0202 22:34:11.454133 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:11 crc kubenswrapper[4755]: I0202 22:34:11.454178 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:11 crc 
kubenswrapper[4755]: I0202 22:34:11.454228 4755 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 22:34:11 crc kubenswrapper[4755]: I0202 22:34:11.677800 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:34:11 crc kubenswrapper[4755]: I0202 22:34:11.989415 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 15:19:57.359741257 +0000 UTC Feb 02 22:34:12 crc kubenswrapper[4755]: I0202 22:34:12.178394 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:12 crc kubenswrapper[4755]: I0202 22:34:12.178486 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:12 crc kubenswrapper[4755]: I0202 22:34:12.179880 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:12 crc kubenswrapper[4755]: I0202 22:34:12.179927 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:12 crc kubenswrapper[4755]: I0202 22:34:12.179944 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:12 crc kubenswrapper[4755]: I0202 22:34:12.180164 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:12 crc kubenswrapper[4755]: I0202 22:34:12.180239 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:12 crc kubenswrapper[4755]: I0202 22:34:12.180260 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:12 crc kubenswrapper[4755]: I0202 22:34:12.383377 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 22:34:12 crc kubenswrapper[4755]: I0202 22:34:12.383608 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:12 crc kubenswrapper[4755]: I0202 22:34:12.385150 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:12 crc kubenswrapper[4755]: I0202 22:34:12.385210 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:12 crc kubenswrapper[4755]: I0202 22:34:12.385230 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:12 crc kubenswrapper[4755]: I0202 22:34:12.455167 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Feb 02 22:34:12 crc kubenswrapper[4755]: I0202 22:34:12.907434 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 22:34:12 crc kubenswrapper[4755]: I0202 22:34:12.990286 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 00:09:18.683839781 +0000 UTC Feb 02 22:34:13 crc kubenswrapper[4755]: I0202 22:34:13.181361 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume 
controller attach/detach" Feb 02 22:34:13 crc kubenswrapper[4755]: I0202 22:34:13.181482 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:13 crc kubenswrapper[4755]: I0202 22:34:13.181483 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:13 crc kubenswrapper[4755]: I0202 22:34:13.183402 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:13 crc kubenswrapper[4755]: I0202 22:34:13.183450 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:13 crc kubenswrapper[4755]: I0202 22:34:13.183467 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:13 crc kubenswrapper[4755]: I0202 22:34:13.183591 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:13 crc kubenswrapper[4755]: I0202 22:34:13.183626 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:13 crc kubenswrapper[4755]: I0202 22:34:13.183643 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:13 crc kubenswrapper[4755]: I0202 22:34:13.183837 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:13 crc kubenswrapper[4755]: I0202 22:34:13.183868 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:13 crc kubenswrapper[4755]: I0202 22:34:13.183889 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:13 crc kubenswrapper[4755]: I0202 22:34:13.852051 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 22:34:13 crc kubenswrapper[4755]: I0202 22:34:13.852324 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:13 crc kubenswrapper[4755]: I0202 22:34:13.854376 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:13 crc kubenswrapper[4755]: I0202 22:34:13.854429 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:13 crc kubenswrapper[4755]: I0202 22:34:13.854449 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:13 crc kubenswrapper[4755]: I0202 22:34:13.990929 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 05:06:54.683687363 +0000 UTC Feb 02 22:34:14 crc kubenswrapper[4755]: I0202 22:34:14.513230 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Feb 02 22:34:14 crc kubenswrapper[4755]: I0202 22:34:14.513447 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:14 crc kubenswrapper[4755]: I0202 22:34:14.514659 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:14 crc 
kubenswrapper[4755]: I0202 22:34:14.514687 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:14 crc kubenswrapper[4755]: I0202 22:34:14.514696 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:14 crc kubenswrapper[4755]: I0202 22:34:14.991854 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 10:57:44.626987503 +0000 UTC Feb 02 22:34:15 crc kubenswrapper[4755]: E0202 22:34:15.140213 4755 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Feb 02 22:34:15 crc kubenswrapper[4755]: I0202 22:34:15.383873 4755 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Feb 02 22:34:15 crc kubenswrapper[4755]: I0202 22:34:15.384067 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 02 22:34:15 crc kubenswrapper[4755]: I0202 22:34:15.992845 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 11:31:53.039772788 +0000 UTC Feb 02 22:34:16 crc kubenswrapper[4755]: I0202 22:34:16.994014 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 09:58:07.114352913 +0000 UTC Feb 02 22:34:17 crc kubenswrapper[4755]: I0202 22:34:17.084793 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 22:34:17 crc kubenswrapper[4755]: I0202 22:34:17.085085 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:17 crc kubenswrapper[4755]: I0202 22:34:17.087672 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:17 crc kubenswrapper[4755]: I0202 22:34:17.087769 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:17 crc kubenswrapper[4755]: I0202 22:34:17.087790 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:17 crc kubenswrapper[4755]: I0202 22:34:17.093106 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 22:34:17 crc kubenswrapper[4755]: I0202 22:34:17.192186 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:17 crc kubenswrapper[4755]: I0202 22:34:17.193512 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:17 crc kubenswrapper[4755]: I0202 
22:34:17.193553 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:17 crc kubenswrapper[4755]: I0202 22:34:17.193570 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:17 crc kubenswrapper[4755]: I0202 22:34:17.198428 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 22:34:17 crc kubenswrapper[4755]: I0202 22:34:17.994168 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 18:15:39.488989719 +0000 UTC Feb 02 22:34:18 crc kubenswrapper[4755]: I0202 22:34:18.195383 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:18 crc kubenswrapper[4755]: I0202 22:34:18.196893 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:18 crc kubenswrapper[4755]: I0202 22:34:18.196952 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:18 crc kubenswrapper[4755]: I0202 22:34:18.196968 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:18 crc kubenswrapper[4755]: I0202 22:34:18.980919 4755 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Feb 02 22:34:18 crc kubenswrapper[4755]: W0202 22:34:18.991427 4755 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Feb 02 22:34:18 crc kubenswrapper[4755]: I0202 22:34:18.991538 4755 trace.go:236] Trace[769623221]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Feb-2026 22:34:08.989) (total time: 10001ms): Feb 02 22:34:18 crc kubenswrapper[4755]: Trace[769623221]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (22:34:18.991) Feb 02 22:34:18 crc kubenswrapper[4755]: Trace[769623221]: [10.001508819s] [10.001508819s] END Feb 02 22:34:18 crc kubenswrapper[4755]: E0202 22:34:18.991574 4755 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Feb 02 22:34:18 crc kubenswrapper[4755]: I0202 22:34:18.994807 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 00:51:15.612038867 +0000 UTC Feb 02 22:34:19 crc kubenswrapper[4755]: I0202 22:34:19.201049 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Feb 02 22:34:19 crc kubenswrapper[4755]: I0202 22:34:19.203819 4755 generic.go:334] "Generic (PLEG): container 
finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="641d71ea8d0c8570531c8d62f128f46fc2b2a80956b3500eaf5e71f85da8cfe7" exitCode=255 Feb 02 22:34:19 crc kubenswrapper[4755]: I0202 22:34:19.203892 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"641d71ea8d0c8570531c8d62f128f46fc2b2a80956b3500eaf5e71f85da8cfe7"} Feb 02 22:34:19 crc kubenswrapper[4755]: I0202 22:34:19.204789 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:19 crc kubenswrapper[4755]: I0202 22:34:19.206134 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:19 crc kubenswrapper[4755]: I0202 22:34:19.206188 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:19 crc kubenswrapper[4755]: I0202 22:34:19.206207 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:19 crc kubenswrapper[4755]: I0202 22:34:19.207028 4755 scope.go:117] "RemoveContainer" containerID="641d71ea8d0c8570531c8d62f128f46fc2b2a80956b3500eaf5e71f85da8cfe7" Feb 02 22:34:19 crc kubenswrapper[4755]: I0202 22:34:19.269180 4755 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Feb 02 22:34:19 crc kubenswrapper[4755]: I0202 22:34:19.269264 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Feb 02 22:34:19 crc kubenswrapper[4755]: I0202 22:34:19.280129 4755 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Feb 02 22:34:19 crc kubenswrapper[4755]: I0202 22:34:19.280186 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Feb 02 22:34:19 crc kubenswrapper[4755]: I0202 22:34:19.995372 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 06:50:26.944549198 +0000 UTC Feb 02 22:34:20 crc kubenswrapper[4755]: I0202 22:34:20.218079 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Feb 02 22:34:20 crc kubenswrapper[4755]: I0202 22:34:20.220494 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641"} Feb 02 22:34:20 crc kubenswrapper[4755]: I0202 22:34:20.220992 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:20 crc kubenswrapper[4755]: I0202 22:34:20.224777 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:20 crc kubenswrapper[4755]: I0202 22:34:20.224987 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:20 crc kubenswrapper[4755]: I0202 22:34:20.225123 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:20 crc kubenswrapper[4755]: I0202 22:34:20.995702 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 15:09:03.432329319 +0000 UTC Feb 02 22:34:21 crc kubenswrapper[4755]: I0202 22:34:21.686447 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:34:21 crc kubenswrapper[4755]: I0202 22:34:21.686656 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:21 crc kubenswrapper[4755]: I0202 22:34:21.686793 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:34:21 crc kubenswrapper[4755]: I0202 22:34:21.688170 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:21 crc kubenswrapper[4755]: I0202 22:34:21.688228 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:21 crc kubenswrapper[4755]: I0202 22:34:21.688249 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:21 crc kubenswrapper[4755]: I0202 22:34:21.693580 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:34:21 crc kubenswrapper[4755]: I0202 22:34:21.996648 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 21:05:06.565026546 +0000 UTC Feb 02 22:34:22 crc kubenswrapper[4755]: I0202 22:34:22.226187 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:22 crc kubenswrapper[4755]: I0202 22:34:22.227497 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:22 crc kubenswrapper[4755]: I0202 22:34:22.227560 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:22 crc kubenswrapper[4755]: I0202 22:34:22.227586 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:22 crc kubenswrapper[4755]: I0202 22:34:22.997368 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 16:57:53.662222534 +0000 UTC Feb 02 22:34:23 crc kubenswrapper[4755]: I0202 22:34:23.228958 4755 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:23 crc kubenswrapper[4755]: I0202 22:34:23.230260 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:23 crc kubenswrapper[4755]: I0202 22:34:23.230323 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:23 crc kubenswrapper[4755]: I0202 22:34:23.230346 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:23 crc kubenswrapper[4755]: I0202 22:34:23.998354 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 08:43:40.257080909 +0000 UTC Feb 02 22:34:24 crc kubenswrapper[4755]: I0202 22:34:24.261102 4755 trace.go:236] Trace[1879833227]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Feb-2026 22:34:12.480) (total time: 11780ms): Feb 02 22:34:24 crc kubenswrapper[4755]: Trace[1879833227]: ---"Objects listed" error: 11780ms (22:34:24.260) Feb 02 22:34:24 crc kubenswrapper[4755]: Trace[1879833227]: [11.780375492s] [11.780375492s] END Feb 02 22:34:24 crc kubenswrapper[4755]: I0202 22:34:24.261149 4755 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Feb 02 22:34:24 crc kubenswrapper[4755]: E0202 22:34:24.266503 4755 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Feb 02 22:34:24 crc kubenswrapper[4755]: I0202 22:34:24.272548 4755 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Feb 02 22:34:24 crc kubenswrapper[4755]: I0202 22:34:24.273114 4755 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Feb 02 22:34:24 crc kubenswrapper[4755]: E0202 22:34:24.281492 4755 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Feb 02 22:34:24 crc kubenswrapper[4755]: I0202 22:34:24.283419 4755 trace.go:236] Trace[795347382]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Feb-2026 22:34:12.860) (total time: 11422ms): Feb 02 22:34:24 crc kubenswrapper[4755]: Trace[795347382]: ---"Objects listed" error: 11422ms (22:34:24.283) Feb 02 22:34:24 crc kubenswrapper[4755]: Trace[795347382]: [11.42243666s] [11.42243666s] END Feb 02 22:34:24 crc kubenswrapper[4755]: I0202 22:34:24.283469 4755 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Feb 02 22:34:24 crc kubenswrapper[4755]: I0202 22:34:24.287130 4755 trace.go:236] Trace[1280086630]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Feb-2026 22:34:13.649) (total time: 10637ms): Feb 02 22:34:24 crc kubenswrapper[4755]: Trace[1280086630]: ---"Objects listed" error: 10636ms (22:34:24.285) Feb 02 22:34:24 crc kubenswrapper[4755]: Trace[1280086630]: [10.637934143s] [10.637934143s] END Feb 02 22:34:24 crc kubenswrapper[4755]: I0202 22:34:24.287176 4755 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Feb 02 22:34:24 crc 
kubenswrapper[4755]: I0202 22:34:24.373515 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 22:34:24 crc kubenswrapper[4755]: I0202 22:34:24.373669 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:24 crc kubenswrapper[4755]: I0202 22:34:24.374891 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:24 crc kubenswrapper[4755]: I0202 22:34:24.374951 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:24 crc kubenswrapper[4755]: I0202 22:34:24.374970 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:24 crc kubenswrapper[4755]: I0202 22:34:24.379539 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 22:34:24 crc kubenswrapper[4755]: I0202 22:34:24.546882 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Feb 02 22:34:24 crc kubenswrapper[4755]: I0202 22:34:24.547137 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:24 crc kubenswrapper[4755]: I0202 22:34:24.548450 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:24 crc kubenswrapper[4755]: I0202 22:34:24.548523 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:24 crc kubenswrapper[4755]: I0202 22:34:24.548600 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:24 crc kubenswrapper[4755]: I0202 22:34:24.564032 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Feb 02 22:34:24 crc kubenswrapper[4755]: I0202 22:34:24.999100 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 20:18:14.447192591 +0000 UTC Feb 02 22:34:25 crc kubenswrapper[4755]: E0202 22:34:25.140343 4755 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.234326 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.234327 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.235543 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.235597 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.235620 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.235615 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 
22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.235705 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.235761 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.288223 4755 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.978719 4755 apiserver.go:52] "Watching apiserver" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.983107 4755 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.983482 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c"] Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.983899 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.984099 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.984325 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:34:25 crc kubenswrapper[4755]: E0202 22:34:25.984433 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:34:25 crc kubenswrapper[4755]: E0202 22:34:25.984533 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.985208 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.985381 4755 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.985442 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.985684 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.986358 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:25 crc kubenswrapper[4755]: E0202 22:34:25.986597 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.986624 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.988003 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.988201 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.989277 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.989286 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.989478 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.989534 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.990302 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Feb 02 22:34:25 crc kubenswrapper[4755]: I0202 22:34:25.999264 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 03:17:25.754969819 +0000 UTC Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.035370 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.048410 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.060825 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.071917 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.083515 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.083572 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.083500 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.083608 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.083987 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.083997 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084029 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084130 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084136 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084163 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084203 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084259 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084288 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084319 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084344 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084352 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084372 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084402 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084426 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084449 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084477 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084504 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084516 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084566 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084568 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084595 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084621 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084658 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084682 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084705 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084745 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084770 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084784 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084788 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084822 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084852 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084881 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084913 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084936 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084961 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084976 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.084986 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085044 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085059 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085068 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085133 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085161 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085191 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085218 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085254 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085282 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085285 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085278 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085334 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085359 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085381 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085405 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085429 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085453 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085474 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 22:34:26 crc 
kubenswrapper[4755]: I0202 22:34:26.085494 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085516 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085539 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085540 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085560 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085584 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085608 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085628 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085624 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085652 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085702 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085722 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085773 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085771 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085874 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.086011 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.086048 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.086266 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.086294 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.086355 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.086469 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.086596 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.086754 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.085796 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.086844 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.086884 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.086921 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.086959 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.086991 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087020 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087053 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087084 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087115 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod 
\"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087146 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087178 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087210 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087248 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087282 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087331 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087365 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087400 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087431 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087468 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087500 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.086777 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087542 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087579 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087120 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087127 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087232 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087923 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087117 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087428 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087490 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087513 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087855 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.087536 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.088238 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.088300 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.088321 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.088362 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.088473 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.088497 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.088527 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.088594 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.088604 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.088658 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.088686 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.088721 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.088761 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.088787 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.088812 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.088836 4755 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.088910 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.088944 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.088966 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.088989 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.089013 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.089038 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.089064 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.089087 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.089406 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 02 22:34:26 crc kubenswrapper[4755]: 
I0202 22:34:26.089420 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.089434 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.089490 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.089528 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.089579 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.089615 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.089647 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.089700 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.089721 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.089798 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.089834 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.089792 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.089870 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.089908 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.089943 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.089978 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.089987 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090016 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090053 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090057 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090090 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090075 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090124 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090144 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090157 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090197 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090231 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090233 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090267 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090304 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090337 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090361 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090365 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090395 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090421 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090448 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090479 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090511 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090571 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090603 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090630 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090664 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090695 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod 
\"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090725 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090783 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090818 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090852 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090887 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090921 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090956 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.090991 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091032 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091063 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" 
(UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091100 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091136 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091170 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091200 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091230 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091264 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091298 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091331 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091338 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091362 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091394 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091425 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091461 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091506 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091542 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091578 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091616 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091651 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091685 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" 
(UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091718 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091860 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091897 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091932 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091966 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092056 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092093 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092127 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092341 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092376 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod 
\"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092408 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092443 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092479 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092513 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092549 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092587 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092623 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092655 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092689 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092724 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092860 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092898 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092932 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092961 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092994 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093026 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093060 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093095 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093136 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093172 4755 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093203 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093257 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093301 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093341 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093378 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093417 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093461 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093499 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093534 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093568 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093600 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093630 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093691 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093758 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093800 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093873 4755 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093892 4755 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" 
DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093926 4755 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093944 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093966 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093986 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094007 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094027 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094048 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094068 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094089 4755 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094109 4755 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094129 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094146 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094165 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" 
(UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094188 4755 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094207 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094225 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094244 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094262 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094279 4755 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094300 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094319 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094338 4755 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094358 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094378 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094401 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094420 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: 
\"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094438 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094457 4755 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094474 4755 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094492 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094509 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094527 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094544 4755 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094562 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094579 4755 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094597 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094614 4755 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094631 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094651 4755 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094670 4755 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094690 4755 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094709 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094754 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094775 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094794 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094818 4755 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.101554 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091459 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.102457 4755 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.102563 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:26.60253522 +0000 UTC m=+22.293755556 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.103127 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.103714 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091536 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091567 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091876 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091908 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.091951 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092350 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092419 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092460 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092554 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.092486 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093090 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093274 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093389 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093400 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093511 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). 
InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.093637 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094282 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094664 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094674 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.094912 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.095077 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.095332 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.095419 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.095407 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.095590 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.103968 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.095605 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.095751 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.095656 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.095839 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.095962 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.096429 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.096521 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.096625 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.096912 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.096961 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.097438 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.097907 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.097904 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.097988 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.098069 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.097586 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.099865 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.100470 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.100545 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.100598 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.100640 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.100684 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.100684 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.101032 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.101279 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.101356 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.101555 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.102040 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.101227 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.103859 4755 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.104441 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.104470 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:26.604438194 +0000 UTC m=+22.295658540 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.104636 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.104917 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.104127 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.104991 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.104229 4755 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.105286 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.105612 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.106377 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.107375 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.107605 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:34:26.607574761 +0000 UTC m=+22.298795127 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.108088 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.110234 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.110415 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.110464 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.111209 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.121378 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.122066 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.122132 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.122439 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.122557 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.123342 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.126024 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.126281 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.126320 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.126830 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.127267 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.127298 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.127468 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.128085 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.128315 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.128522 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.129000 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.129092 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). 
InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.129231 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.129383 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.129867 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.129908 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.130377 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.131459 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.131526 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.131598 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.132055 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.132171 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.132379 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.132500 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.132655 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.132658 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.132707 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.132813 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.103960 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.132960 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.132975 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.133207 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.133468 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.133509 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.133539 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.133555 4755 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.133630 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:26.63360721 +0000 UTC m=+22.324827546 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.133655 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.133672 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.133723 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.134484 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.134771 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.134842 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.134879 4755 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.134933 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.135149 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:26.635089122 +0000 UTC m=+22.326309568 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.135643 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.135710 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.135813 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.136175 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.136679 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.136809 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.136917 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.136958 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.137010 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.137008 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.138164 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.138302 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.139135 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.139944 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.141543 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.141592 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.141749 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.142343 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.142638 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.143006 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.143635 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.143646 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.147903 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.147897 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.147981 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.148020 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.148002 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.148125 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.148140 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.148290 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.156760 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.157078 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.157448 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.163154 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.163386 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.171602 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.176864 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.184472 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.195300 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.195419 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.195440 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.195557 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.195572 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.195581 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.195599 4755 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.195618 4755 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" 
(UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.195636 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.195653 4755 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.195671 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196092 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196129 4755 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196148 4755 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196165 4755 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196182 4755 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196199 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196215 4755 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196233 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196249 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196265 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" 
DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196283 4755 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196299 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196315 4755 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196333 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196349 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196364 4755 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196379 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196396 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196413 4755 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196431 4755 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196448 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196463 4755 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196480 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc 
kubenswrapper[4755]: I0202 22:34:26.196496 4755 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196511 4755 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196526 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196544 4755 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196560 4755 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196576 4755 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196592 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196608 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196624 4755 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196642 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196659 4755 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196676 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196692 4755 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc 
kubenswrapper[4755]: I0202 22:34:26.196708 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196724 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196762 4755 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196780 4755 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196799 4755 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196815 4755 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196831 4755 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196848 4755 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196864 4755 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196881 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196899 4755 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196916 4755 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196933 4755 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 02 
22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196950 4755 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196966 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196983 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.196999 4755 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197014 4755 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197030 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197047 4755 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197064 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197080 4755 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197098 4755 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197115 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197132 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197149 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc 
kubenswrapper[4755]: I0202 22:34:26.197165 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197181 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197198 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197215 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197233 4755 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197249 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197266 4755 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197283 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197299 4755 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197317 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197331 4755 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197347 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197364 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 
22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197379 4755 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197395 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197410 4755 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197427 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197443 4755 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197460 4755 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197477 4755 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197494 4755 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197511 4755 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197527 4755 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197544 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197561 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197577 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Feb 02 
22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197594 4755 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197611 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197630 4755 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197648 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197664 4755 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197683 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197700 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197933 4755 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197957 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197973 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.197990 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198006 4755 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198022 4755 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" 
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198038 4755 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198054 4755 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198069 4755 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198085 4755 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198102 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198118 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198134 4755 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198151 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198166 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198182 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198197 4755 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198213 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198229 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198244 4755 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198259 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198275 4755 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198293 4755 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198310 4755 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198326 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198343 4755 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198360 4755 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198377 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198393 4755 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198412 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198429 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198445 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198463 4755 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198479 4755 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198495 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198512 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198529 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198546 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198563 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198579 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198597 4755 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198612 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198628 4755 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.198644 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\""
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.240185 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log"
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.240793 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.242933 4755 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641" exitCode=255
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.242977 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641"}
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.243019 4755 scope.go:117] "RemoveContainer" containerID="641d71ea8d0c8570531c8d62f128f46fc2b2a80956b3500eaf5e71f85da8cfe7"
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.262305 4755 scope.go:117] "RemoveContainer" containerID="e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641"
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.262367 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.262757 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792"
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.264805 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.277534 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.296599 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.305603 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.308122 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.320057 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.320144 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 22:34:26 crc kubenswrapper[4755]: W0202 22:34:26.321967 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-183fc13f2d6fcc37376118e89c7f1b3d253dcc102334f7bbaa6b88188378fb2a WatchSource:0}: Error finding container 183fc13f2d6fcc37376118e89c7f1b3d253dcc102334f7bbaa6b88188378fb2a: Status 404 returned error can't find the container with id 183fc13f2d6fcc37376118e89c7f1b3d253dcc102334f7bbaa6b88188378fb2a Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.329286 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.333518 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 22:34:26 crc kubenswrapper[4755]: W0202 22:34:26.334082 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-7f51b6222fffb0b0b9c13490e42ac3e5b61436cece9fed46322940bd7e550f0e WatchSource:0}: Error finding container 7f51b6222fffb0b0b9c13490e42ac3e5b61436cece9fed46322940bd7e550f0e: Status 404 returned error can't find the container with id 7f51b6222fffb0b0b9c13490e42ac3e5b61436cece9fed46322940bd7e550f0e Feb 02 22:34:26 crc kubenswrapper[4755]: W0202 22:34:26.353963 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-64d1c0a6379f053be2dfe9d7b534d6940acc2e198eb691ec48bf56c2ce74cbf0 WatchSource:0}: Error finding container 64d1c0a6379f053be2dfe9d7b534d6940acc2e198eb691ec48bf56c2ce74cbf0: Status 404 returned error can't find the container with id 64d1c0a6379f053be2dfe9d7b534d6940acc2e198eb691ec48bf56c2ce74cbf0 Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.702101 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.702207 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.702259 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.702346 4755 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:34:27.702309286 +0000 UTC m=+23.393529652 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.702401 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.702434 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.702447 4755 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.702448 4755 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.702501 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:27.702487861 +0000 UTC m=+23.393708187 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.702405 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.702526 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:27.702507182 +0000 UTC m=+23.393727538 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 22:34:26 crc kubenswrapper[4755]: I0202 22:34:26.702591 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.702456 4755 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.702689 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:27.702673927 +0000 UTC m=+23.393894293 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.702704 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.702762 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.702783 4755 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 22:34:26 crc kubenswrapper[4755]: E0202 22:34:26.702857 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:27.702823181 +0000 UTC m=+23.394043537 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:26.999900 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 11:14:30.287023832 +0000 UTC Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.073090 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.073765 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.074763 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.075529 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.076284 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.076938 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.077672 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.078405 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.079222 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.079918 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.080582 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.081696 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.083907 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.085424 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.087151 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.087947 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.088754 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.089299 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.090056 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.090850 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.091479 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.092242 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.092835 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.093765 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.094434 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.095332 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" 
path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.098963 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.100061 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.101334 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.102744 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.103368 4755 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.103501 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.106530 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.107313 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.107863 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.110757 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.112300 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.113356 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.114670 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.116075 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.117053 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.118345 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.119607 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.122123 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.123324 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.126155 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.128229 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.130238 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.131304 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.132249 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.133294 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.134389 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.135646 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.136642 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" 
path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.251048 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726"} Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.251135 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"183fc13f2d6fcc37376118e89c7f1b3d253dcc102334f7bbaa6b88188378fb2a"} Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.256238 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.264488 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"64d1c0a6379f053be2dfe9d7b534d6940acc2e198eb691ec48bf56c2ce74cbf0"} Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.266718 4755 scope.go:117] "RemoveContainer" containerID="e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.267888 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f"} Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.267982 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f"} Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.268006 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"7f51b6222fffb0b0b9c13490e42ac3e5b61436cece9fed46322940bd7e550f0e"} Feb 02 22:34:27 crc kubenswrapper[4755]: E0202 22:34:27.268597 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.282580 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.299241 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.312679 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.325884 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.338083 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.357550 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.372176 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://641d71ea8d0c8570531c8d62f128f46fc2b2a80956b3500eaf5e71f85da8cfe7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:19Z\\\",\\\"message\\\":\\\"W0202 22:34:08.353923 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 
22:34:08.354309 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770071648 cert, and key in /tmp/serving-cert-2485484168/serving-signer.crt, /tmp/serving-cert-2485484168/serving-signer.key\\\\nI0202 22:34:08.665170 1 observer_polling.go:159] Starting file observer\\\\nW0202 22:34:08.667389 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 22:34:08.667619 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:08.669607 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2485484168/tls.crt::/tmp/serving-cert-2485484168/tls.key\\\\\\\"\\\\nF0202 22:34:19.000037 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.383215 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.394949 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.405540 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.422056 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.435607 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},
\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 
1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.451601 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.466700 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.711314 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.711396 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.711427 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.711452 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:34:27 crc kubenswrapper[4755]: I0202 22:34:27.711477 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod 
\"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:27 crc kubenswrapper[4755]: E0202 22:34:27.711579 4755 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 22:34:27 crc kubenswrapper[4755]: E0202 22:34:27.711626 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:29.71161337 +0000 UTC m=+25.402833696 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 22:34:27 crc kubenswrapper[4755]: E0202 22:34:27.711673 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:34:29.711668132 +0000 UTC m=+25.402888458 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:34:27 crc kubenswrapper[4755]: E0202 22:34:27.711741 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 22:34:27 crc kubenswrapper[4755]: E0202 22:34:27.711756 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 22:34:27 crc kubenswrapper[4755]: E0202 22:34:27.711768 4755 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 22:34:27 crc kubenswrapper[4755]: E0202 22:34:27.711795 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:29.711786765 +0000 UTC m=+25.403007091 (durationBeforeRetry 2s). 
Feb 02 22:34:27 crc kubenswrapper[4755]: E0202 22:34:27.711795 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:29.711786765 +0000 UTC m=+25.403007091 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 22:34:27 crc kubenswrapper[4755]: E0202 22:34:27.711832 4755 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Feb 02 22:34:27 crc kubenswrapper[4755]: E0202 22:34:27.711856 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:29.711848977 +0000 UTC m=+25.403069303 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Feb 02 22:34:27 crc kubenswrapper[4755]: E0202 22:34:27.711905 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Feb 02 22:34:27 crc kubenswrapper[4755]: E0202 22:34:27.711916 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Feb 02 22:34:27 crc kubenswrapper[4755]: E0202 22:34:27.711925 4755 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 22:34:27 crc kubenswrapper[4755]: E0202 22:34:27.711948 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:29.711941149 +0000 UTC m=+25.403161475 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 22:34:28 crc kubenswrapper[4755]: I0202 22:34:28.000031 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 06:54:56.273017987 +0000 UTC
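The certificate_manager lines in this log report a different rotation deadline on every recomputation: the client-go certificate manager picks a uniformly random point in roughly the last 70-90% of the certificate's validity window. A small sketch of that jitter policy (an approximation of the upstream logic, not the vendored code; the issuance time below is an assumption, only the expiry is taken from the log):

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline approximates the client-go certificate manager policy:
// rotate at a random point in the 70-90% span of the cert's lifetime,
// which is why each logged recomputation shows a different deadline.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	// Expiry taken from the log; issuance one year earlier is an assumption.
	notAfter, _ := time.Parse("2006-01-02 15:04:05 -0700 MST", "2026-02-24 05:53:03 +0000 UTC")
	notBefore := notAfter.AddDate(-1, 0, 0)
	for i := 0; i < 3; i++ {
		fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter))
	}
}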
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:28 crc kubenswrapper[4755]: I0202 22:34:28.068686 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:34:28 crc kubenswrapper[4755]: E0202 22:34:28.068953 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:34:28 crc kubenswrapper[4755]: I0202 22:34:28.068702 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:34:28 crc kubenswrapper[4755]: E0202 22:34:28.069210 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:34:28 crc kubenswrapper[4755]: E0202 22:34:28.069062 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:34:29 crc kubenswrapper[4755]: I0202 22:34:29.000923 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 15:32:24.297677099 +0000 UTC Feb 02 22:34:29 crc kubenswrapper[4755]: I0202 22:34:29.727499 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:34:29 crc kubenswrapper[4755]: I0202 22:34:29.727635 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:29 crc kubenswrapper[4755]: I0202 22:34:29.727684 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:34:29 crc kubenswrapper[4755]: E0202 22:34:29.727802 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:34:33.727766 +0000 UTC m=+29.418986366 (durationBeforeRetry 4s). 
Feb 02 22:34:29 crc kubenswrapper[4755]: E0202 22:34:29.727802 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:34:33.727766 +0000 UTC m=+29.418986366 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:34:29 crc kubenswrapper[4755]: E0202 22:34:29.727861 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Feb 02 22:34:29 crc kubenswrapper[4755]: E0202 22:34:29.727888 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Feb 02 22:34:29 crc kubenswrapper[4755]: I0202 22:34:29.727883 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 22:34:29 crc kubenswrapper[4755]: E0202 22:34:29.727907 4755 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 22:34:29 crc kubenswrapper[4755]: E0202 22:34:29.727939 4755 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Feb 02 22:34:29 crc kubenswrapper[4755]: E0202 22:34:29.727967 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:33.727948525 +0000 UTC m=+29.419168881 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 22:34:29 crc kubenswrapper[4755]: I0202 22:34:29.727944 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 22:34:29 crc kubenswrapper[4755]: E0202 22:34:29.728071 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:33.728042758 +0000 UTC m=+29.419263114 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Feb 02 22:34:29 crc kubenswrapper[4755]: E0202 22:34:29.728127 4755 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Feb 02 22:34:29 crc kubenswrapper[4755]: E0202 22:34:29.728169 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Feb 02 22:34:29 crc kubenswrapper[4755]: E0202 22:34:29.728199 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Feb 02 22:34:29 crc kubenswrapper[4755]: E0202 22:34:29.728221 4755 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 22:34:29 crc kubenswrapper[4755]: E0202 22:34:29.728240 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:33.728214403 +0000 UTC m=+29.419434809 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Feb 02 22:34:29 crc kubenswrapper[4755]: E0202 22:34:29.728292 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:33.728264334 +0000 UTC m=+29.419484750 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.001757 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 16:15:15.736114032 +0000 UTC
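The nestedpendingoperations entries above show durationBeforeRetry doubling from 2s to 4s for the same volumes: the kubelet backs off exponentially on failing mount and unmount operations. A minimal sketch of that doubling schedule (the initial value matches the log; the cap is an assumption, not read from the log):

package main

import (
	"fmt"
	"time"
)

func main() {
	// Reproduces the observable 2s -> 4s -> ... doubling of
	// durationBeforeRetry seen in the nestedpendingoperations entries.
	backoff := 2 * time.Second
	const maxBackoff = 2 * time.Minute // assumed cap
	for attempt := 1; attempt <= 8; attempt++ {
		fmt.Printf("attempt %d: durationBeforeRetry %v\n", attempt, backoff)
		backoff *= 2
		if backoff > maxBackoff {
			backoff = maxBackoff
		}
	}
}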
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.068678 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:30 crc kubenswrapper[4755]: E0202 22:34:30.068960 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:34:30 crc kubenswrapper[4755]: E0202 22:34:30.069044 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.068768 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:34:30 crc kubenswrapper[4755]: E0202 22:34:30.069154 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.279297 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c"} Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.307521 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\
"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:30Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.331214 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:30Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.372369 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:30Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.405039 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:30Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.420987 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:30Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.436611 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:30Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.448582 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:30Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.657275 4755 csr.go:261] certificate signing request csr-v9t2f is approved, waiting to be issued Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.667362 4755 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.669619 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.669658 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.669668 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.669743 4755 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.685648 4755 kubelet_node_status.go:115] "Node was previously registered" node="crc" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.685912 4755 kubelet_node_status.go:79] "Successfully registered node" node="crc" 
Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.686333 4755 csr.go:257] certificate signing request csr-v9t2f is issued
Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.686821 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.686845 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.686862 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.686876 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.686885 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:30Z","lastTransitionTime":"2026-02-02T22:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:30 crc kubenswrapper[4755]: E0202 22:34:30.707297 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:30Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.712853 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.712906 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.712919 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.712935 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.712948 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:30Z","lastTransitionTime":"2026-02-02T22:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:30 crc kubenswrapper[4755]: E0202 22:34:30.739872 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:30Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.743207 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.743241 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.743254 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.743271 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.743287 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:30Z","lastTransitionTime":"2026-02-02T22:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:30 crc kubenswrapper[4755]: E0202 22:34:30.764919 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:30Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.770464 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.770501 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.770512 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.770532 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.770544 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:30Z","lastTransitionTime":"2026-02-02T22:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:30 crc kubenswrapper[4755]: E0202 22:34:30.781016 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:30Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.784000 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.784042 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.784051 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.784066 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.784075 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:30Z","lastTransitionTime":"2026-02-02T22:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:30 crc kubenswrapper[4755]: E0202 22:34:30.794694 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:30Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:30 crc kubenswrapper[4755]: E0202 22:34:30.794824 4755 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.796331 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.796373 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.796387 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.796404 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.796416 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:30Z","lastTransitionTime":"2026-02-02T22:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.898789 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.898821 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.898829 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.898842 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:30 crc kubenswrapper[4755]: I0202 22:34:30.898851 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:30Z","lastTransitionTime":"2026-02-02T22:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.001592 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.001635 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.001647 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.001667 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.001682 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:31Z","lastTransitionTime":"2026-02-02T22:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.002385 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 06:13:30.063597362 +0000 UTC Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.104800 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.104844 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.104853 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.104868 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.104879 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:31Z","lastTransitionTime":"2026-02-02T22:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.206765 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.206823 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.206836 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.206854 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.206866 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:31Z","lastTransitionTime":"2026-02-02T22:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.309433 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.309470 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.309479 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.309494 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.309505 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:31Z","lastTransitionTime":"2026-02-02T22:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.411910 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.411948 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.411957 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.411971 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.411980 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:31Z","lastTransitionTime":"2026-02-02T22:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.514130 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.514194 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.514204 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.514219 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.514230 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:31Z","lastTransitionTime":"2026-02-02T22:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.533709 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-8q4mc"] Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.534072 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-rdh9w"] Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.534171 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.534327 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-rdh9w" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.535082 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-pgbrf"] Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.535587 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.535912 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-5fdlw"] Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.536041 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.536092 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.536291 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.536523 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.536713 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.536751 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.537459 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.537533 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.537851 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.538173 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.538174 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.539594 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.539612 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.542312 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.542531 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.543203 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.551785 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.565354 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.580230 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.602503 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.616986 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.617029 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.617038 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.617052 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.617061 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:31Z","lastTransitionTime":"2026-02-02T22:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.618096 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.634348 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.645961 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.647261 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d9c0bf0a-73a9-42db-8ae6-716f712c0701-cnibin\") pod \"multus-additional-cni-plugins-pgbrf\" (UID: \"d9c0bf0a-73a9-42db-8ae6-716f712c0701\") " pod="openshift-multus/multus-additional-cni-plugins-pgbrf" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.647307 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-multus-cni-dir\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.647327 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-host-run-multus-certs\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.647351 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/d9c0bf0a-73a9-42db-8ae6-716f712c0701-cni-binary-copy\") pod \"multus-additional-cni-plugins-pgbrf\" (UID: \"d9c0bf0a-73a9-42db-8ae6-716f712c0701\") " pod="openshift-multus/multus-additional-cni-plugins-pgbrf" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.647420 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-system-cni-dir\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.647489 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/d9c0bf0a-73a9-42db-8ae6-716f712c0701-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-pgbrf\" (UID: \"d9c0bf0a-73a9-42db-8ae6-716f712c0701\") " pod="openshift-multus/multus-additional-cni-plugins-pgbrf" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.647553 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-host-run-k8s-cni-cncf-io\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.647622 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bc686b0f-8473-46b8-9d5e-abcddcca635f-proxy-tls\") pod \"machine-config-daemon-8q4mc\" (UID: \"bc686b0f-8473-46b8-9d5e-abcddcca635f\") " pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.647651 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-multus-socket-dir-parent\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.647683 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d9c0bf0a-73a9-42db-8ae6-716f712c0701-tuning-conf-dir\") pod \"multus-additional-cni-plugins-pgbrf\" (UID: \"d9c0bf0a-73a9-42db-8ae6-716f712c0701\") " pod="openshift-multus/multus-additional-cni-plugins-pgbrf" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.647703 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-etc-kubernetes\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.647747 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4k9p\" (UniqueName: \"kubernetes.io/projected/bc686b0f-8473-46b8-9d5e-abcddcca635f-kube-api-access-f4k9p\") pod \"machine-config-daemon-8q4mc\" (UID: \"bc686b0f-8473-46b8-9d5e-abcddcca635f\") " pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" Feb 02 
22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.647767 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d9c0bf0a-73a9-42db-8ae6-716f712c0701-system-cni-dir\") pod \"multus-additional-cni-plugins-pgbrf\" (UID: \"d9c0bf0a-73a9-42db-8ae6-716f712c0701\") " pod="openshift-multus/multus-additional-cni-plugins-pgbrf" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.647786 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-multus-conf-dir\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.647807 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/96c13e34-9c0d-4838-8353-f4ebd83ecf06-hosts-file\") pod \"node-resolver-rdh9w\" (UID: \"96c13e34-9c0d-4838-8353-f4ebd83ecf06\") " pod="openshift-dns/node-resolver-rdh9w" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.647829 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqnlf\" (UniqueName: \"kubernetes.io/projected/96c13e34-9c0d-4838-8353-f4ebd83ecf06-kube-api-access-cqnlf\") pod \"node-resolver-rdh9w\" (UID: \"96c13e34-9c0d-4838-8353-f4ebd83ecf06\") " pod="openshift-dns/node-resolver-rdh9w" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.647862 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-os-release\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.647880 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/bc686b0f-8473-46b8-9d5e-abcddcca635f-rootfs\") pod \"machine-config-daemon-8q4mc\" (UID: \"bc686b0f-8473-46b8-9d5e-abcddcca635f\") " pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.647900 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bc686b0f-8473-46b8-9d5e-abcddcca635f-mcd-auth-proxy-config\") pod \"machine-config-daemon-8q4mc\" (UID: \"bc686b0f-8473-46b8-9d5e-abcddcca635f\") " pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.647924 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-host-var-lib-cni-bin\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.648009 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-host-var-lib-cni-multus\") pod \"multus-5fdlw\" (UID: 
\"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.648059 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-host-run-netns\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.648120 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twttk\" (UniqueName: \"kubernetes.io/projected/d9c0bf0a-73a9-42db-8ae6-716f712c0701-kube-api-access-twttk\") pod \"multus-additional-cni-plugins-pgbrf\" (UID: \"d9c0bf0a-73a9-42db-8ae6-716f712c0701\") " pod="openshift-multus/multus-additional-cni-plugins-pgbrf" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.648199 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d9c0bf0a-73a9-42db-8ae6-716f712c0701-os-release\") pod \"multus-additional-cni-plugins-pgbrf\" (UID: \"d9c0bf0a-73a9-42db-8ae6-716f712c0701\") " pod="openshift-multus/multus-additional-cni-plugins-pgbrf" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.648232 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-cnibin\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.648261 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/c206b6fd-200d-47ea-88a5-453f3093c749-multus-daemon-config\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.648298 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/c206b6fd-200d-47ea-88a5-453f3093c749-cni-binary-copy\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.648327 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-host-var-lib-kubelet\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.648355 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-hostroot\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.648396 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmmxz\" (UniqueName: \"kubernetes.io/projected/c206b6fd-200d-47ea-88a5-453f3093c749-kube-api-access-cmmxz\") pod \"multus-5fdlw\" 
(UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.661005 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.676361 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.688291 4755 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-02-02 22:29:30 +0000 UTC, rotation deadline is 2026-11-08 16:17:12.95401409 +0000 UTC Feb 02 22:34:31 crc kubenswrapper[4755]: 
I0202 22:34:31.688565 4755 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6689h42m41.265453019s for next certificate rotation Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.692377 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.701353 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.713650 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.719000 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.719035 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.719045 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.719058 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.719067 4755 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:31Z","lastTransitionTime":"2026-02-02T22:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.724810 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.736377 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.748944 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-multus-cni-dir\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.748995 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-host-run-multus-certs\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749027 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: 
\"kubernetes.io/host-path/d9c0bf0a-73a9-42db-8ae6-716f712c0701-cnibin\") pod \"multus-additional-cni-plugins-pgbrf\" (UID: \"d9c0bf0a-73a9-42db-8ae6-716f712c0701\") " pod="openshift-multus/multus-additional-cni-plugins-pgbrf" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749060 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d9c0bf0a-73a9-42db-8ae6-716f712c0701-cni-binary-copy\") pod \"multus-additional-cni-plugins-pgbrf\" (UID: \"d9c0bf0a-73a9-42db-8ae6-716f712c0701\") " pod="openshift-multus/multus-additional-cni-plugins-pgbrf" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749097 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-system-cni-dir\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749130 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-host-run-k8s-cni-cncf-io\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749163 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/d9c0bf0a-73a9-42db-8ae6-716f712c0701-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-pgbrf\" (UID: \"d9c0bf0a-73a9-42db-8ae6-716f712c0701\") " pod="openshift-multus/multus-additional-cni-plugins-pgbrf" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749168 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-host-run-multus-certs\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749194 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bc686b0f-8473-46b8-9d5e-abcddcca635f-proxy-tls\") pod \"machine-config-daemon-8q4mc\" (UID: \"bc686b0f-8473-46b8-9d5e-abcddcca635f\") " pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749243 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-multus-socket-dir-parent\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749268 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d9c0bf0a-73a9-42db-8ae6-716f712c0701-tuning-conf-dir\") pod \"multus-additional-cni-plugins-pgbrf\" (UID: \"d9c0bf0a-73a9-42db-8ae6-716f712c0701\") " pod="openshift-multus/multus-additional-cni-plugins-pgbrf" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749286 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-etc-kubernetes\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749295 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d9c0bf0a-73a9-42db-8ae6-716f712c0701-cnibin\") pod \"multus-additional-cni-plugins-pgbrf\" (UID: \"d9c0bf0a-73a9-42db-8ae6-716f712c0701\") " pod="openshift-multus/multus-additional-cni-plugins-pgbrf" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749305 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d9c0bf0a-73a9-42db-8ae6-716f712c0701-system-cni-dir\") pod \"multus-additional-cni-plugins-pgbrf\" (UID: \"d9c0bf0a-73a9-42db-8ae6-716f712c0701\") " pod="openshift-multus/multus-additional-cni-plugins-pgbrf" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749320 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-multus-conf-dir\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749337 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4k9p\" (UniqueName: \"kubernetes.io/projected/bc686b0f-8473-46b8-9d5e-abcddcca635f-kube-api-access-f4k9p\") pod \"machine-config-daemon-8q4mc\" (UID: \"bc686b0f-8473-46b8-9d5e-abcddcca635f\") " pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749355 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/96c13e34-9c0d-4838-8353-f4ebd83ecf06-hosts-file\") pod \"node-resolver-rdh9w\" (UID: \"96c13e34-9c0d-4838-8353-f4ebd83ecf06\") " pod="openshift-dns/node-resolver-rdh9w" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749386 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqnlf\" (UniqueName: \"kubernetes.io/projected/96c13e34-9c0d-4838-8353-f4ebd83ecf06-kube-api-access-cqnlf\") pod \"node-resolver-rdh9w\" (UID: \"96c13e34-9c0d-4838-8353-f4ebd83ecf06\") " pod="openshift-dns/node-resolver-rdh9w" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749376 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-system-cni-dir\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749415 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-os-release\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749441 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bc686b0f-8473-46b8-9d5e-abcddcca635f-mcd-auth-proxy-config\") pod \"machine-config-daemon-8q4mc\" 
(UID: \"bc686b0f-8473-46b8-9d5e-abcddcca635f\") " pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749449 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d9c0bf0a-73a9-42db-8ae6-716f712c0701-system-cni-dir\") pod \"multus-additional-cni-plugins-pgbrf\" (UID: \"d9c0bf0a-73a9-42db-8ae6-716f712c0701\") " pod="openshift-multus/multus-additional-cni-plugins-pgbrf" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749494 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-multus-conf-dir\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749538 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-multus-socket-dir-parent\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749337 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-host-run-k8s-cni-cncf-io\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749265 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-multus-cni-dir\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749459 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-host-var-lib-cni-bin\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749827 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/96c13e34-9c0d-4838-8353-f4ebd83ecf06-hosts-file\") pod \"node-resolver-rdh9w\" (UID: \"96c13e34-9c0d-4838-8353-f4ebd83ecf06\") " pod="openshift-dns/node-resolver-rdh9w" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749843 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-etc-kubernetes\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749862 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/bc686b0f-8473-46b8-9d5e-abcddcca635f-rootfs\") pod \"machine-config-daemon-8q4mc\" (UID: \"bc686b0f-8473-46b8-9d5e-abcddcca635f\") " pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749879 4755 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-host-var-lib-cni-multus\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749911 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-host-var-lib-cni-multus\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749947 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/d9c0bf0a-73a9-42db-8ae6-716f712c0701-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-pgbrf\" (UID: \"d9c0bf0a-73a9-42db-8ae6-716f712c0701\") " pod="openshift-multus/multus-additional-cni-plugins-pgbrf" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.749961 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/bc686b0f-8473-46b8-9d5e-abcddcca635f-rootfs\") pod \"machine-config-daemon-8q4mc\" (UID: \"bc686b0f-8473-46b8-9d5e-abcddcca635f\") " pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.750022 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d9c0bf0a-73a9-42db-8ae6-716f712c0701-cni-binary-copy\") pod \"multus-additional-cni-plugins-pgbrf\" (UID: \"d9c0bf0a-73a9-42db-8ae6-716f712c0701\") " pod="openshift-multus/multus-additional-cni-plugins-pgbrf" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.750038 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-host-var-lib-cni-bin\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.750032 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-host-run-netns\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.750064 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-host-run-netns\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.750098 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twttk\" (UniqueName: \"kubernetes.io/projected/d9c0bf0a-73a9-42db-8ae6-716f712c0701-kube-api-access-twttk\") pod \"multus-additional-cni-plugins-pgbrf\" (UID: \"d9c0bf0a-73a9-42db-8ae6-716f712c0701\") " pod="openshift-multus/multus-additional-cni-plugins-pgbrf" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.750116 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"os-release\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-os-release\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.750133 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-cnibin\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.750152 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/c206b6fd-200d-47ea-88a5-453f3093c749-multus-daemon-config\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.750184 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d9c0bf0a-73a9-42db-8ae6-716f712c0701-tuning-conf-dir\") pod \"multus-additional-cni-plugins-pgbrf\" (UID: \"d9c0bf0a-73a9-42db-8ae6-716f712c0701\") " pod="openshift-multus/multus-additional-cni-plugins-pgbrf" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.750220 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-cnibin\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.750250 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d9c0bf0a-73a9-42db-8ae6-716f712c0701-os-release\") pod \"multus-additional-cni-plugins-pgbrf\" (UID: \"d9c0bf0a-73a9-42db-8ae6-716f712c0701\") " pod="openshift-multus/multus-additional-cni-plugins-pgbrf" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.750272 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/c206b6fd-200d-47ea-88a5-453f3093c749-cni-binary-copy\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.750286 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-host-var-lib-kubelet\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.751014 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-hostroot\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.750321 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-host-var-lib-kubelet\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 
22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.751070 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/c206b6fd-200d-47ea-88a5-453f3093c749-hostroot\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.750502 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bc686b0f-8473-46b8-9d5e-abcddcca635f-mcd-auth-proxy-config\") pod \"machine-config-daemon-8q4mc\" (UID: \"bc686b0f-8473-46b8-9d5e-abcddcca635f\") " pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.750703 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/c206b6fd-200d-47ea-88a5-453f3093c749-multus-daemon-config\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.750299 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d9c0bf0a-73a9-42db-8ae6-716f712c0701-os-release\") pod \"multus-additional-cni-plugins-pgbrf\" (UID: \"d9c0bf0a-73a9-42db-8ae6-716f712c0701\") " pod="openshift-multus/multus-additional-cni-plugins-pgbrf" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.750946 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/c206b6fd-200d-47ea-88a5-453f3093c749-cni-binary-copy\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.751036 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmmxz\" (UniqueName: \"kubernetes.io/projected/c206b6fd-200d-47ea-88a5-453f3093c749-kube-api-access-cmmxz\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.753686 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.756692 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bc686b0f-8473-46b8-9d5e-abcddcca635f-proxy-tls\") pod \"machine-config-daemon-8q4mc\" (UID: \"bc686b0f-8473-46b8-9d5e-abcddcca635f\") " pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.765926 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmmxz\" (UniqueName: \"kubernetes.io/projected/c206b6fd-200d-47ea-88a5-453f3093c749-kube-api-access-cmmxz\") pod \"multus-5fdlw\" (UID: \"c206b6fd-200d-47ea-88a5-453f3093c749\") " pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.767106 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.768849 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4k9p\" (UniqueName: \"kubernetes.io/projected/bc686b0f-8473-46b8-9d5e-abcddcca635f-kube-api-access-f4k9p\") pod \"machine-config-daemon-8q4mc\" (UID: \"bc686b0f-8473-46b8-9d5e-abcddcca635f\") " pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.768607 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqnlf\" (UniqueName: \"kubernetes.io/projected/96c13e34-9c0d-4838-8353-f4ebd83ecf06-kube-api-access-cqnlf\") pod \"node-resolver-rdh9w\" (UID: \"96c13e34-9c0d-4838-8353-f4ebd83ecf06\") " pod="openshift-dns/node-resolver-rdh9w" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.771720 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twttk\" (UniqueName: \"kubernetes.io/projected/d9c0bf0a-73a9-42db-8ae6-716f712c0701-kube-api-access-twttk\") pod \"multus-additional-cni-plugins-pgbrf\" (UID: \"d9c0bf0a-73a9-42db-8ae6-716f712c0701\") " pod="openshift-multus/multus-additional-cni-plugins-pgbrf" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.778324 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.790570 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.802741 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.815742 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.822057 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.822092 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.822100 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.822116 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.822126 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:31Z","lastTransitionTime":"2026-02-02T22:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.845676 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.851640 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-rdh9w" Feb 02 22:34:31 crc kubenswrapper[4755]: W0202 22:34:31.857768 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbc686b0f_8473_46b8_9d5e_abcddcca635f.slice/crio-caf582560956e61f0bd1f2e655e39275dd3c39a093f8655baf59295b297d5be2 WatchSource:0}: Error finding container caf582560956e61f0bd1f2e655e39275dd3c39a093f8655baf59295b297d5be2: Status 404 returned error can't find the container with id caf582560956e61f0bd1f2e655e39275dd3c39a093f8655baf59295b297d5be2 Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.858343 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.862681 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-5fdlw" Feb 02 22:34:31 crc kubenswrapper[4755]: W0202 22:34:31.867099 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod96c13e34_9c0d_4838_8353_f4ebd83ecf06.slice/crio-e0517aa8f0a2f16016cc20290ce17bd2fa8ec65d3405189361735a0fade1a7ee WatchSource:0}: Error finding container e0517aa8f0a2f16016cc20290ce17bd2fa8ec65d3405189361735a0fade1a7ee: Status 404 returned error can't find the container with id e0517aa8f0a2f16016cc20290ce17bd2fa8ec65d3405189361735a0fade1a7ee Feb 02 22:34:31 crc kubenswrapper[4755]: W0202 22:34:31.879795 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd9c0bf0a_73a9_42db_8ae6_716f712c0701.slice/crio-66187355544a6ed392925311860fbf0d53534d5c26b430b07eb987683251e85f WatchSource:0}: Error finding container 66187355544a6ed392925311860fbf0d53534d5c26b430b07eb987683251e85f: Status 404 returned error can't find the container with id 66187355544a6ed392925311860fbf0d53534d5c26b430b07eb987683251e85f Feb 02 22:34:31 crc kubenswrapper[4755]: W0202 22:34:31.880628 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc206b6fd_200d_47ea_88a5_453f3093c749.slice/crio-ec8e70fe895f8e0e96c0e6358f8f5d16ed0df8fca6d75c7db2b6dda5536ad6cb WatchSource:0}: Error finding container ec8e70fe895f8e0e96c0e6358f8f5d16ed0df8fca6d75c7db2b6dda5536ad6cb: Status 404 returned error can't find the container with id ec8e70fe895f8e0e96c0e6358f8f5d16ed0df8fca6d75c7db2b6dda5536ad6cb Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.924936 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4mblb"] Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.928348 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.928385 4755 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.929040 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.929070 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.929119 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:31Z","lastTransitionTime":"2026-02-02T22:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.931323 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.932845 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.932926 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.933271 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.933404 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.933630 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.934537 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.934794 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.943849 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.952199 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-run-ovn-kubernetes\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.952251 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-slash\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.952270 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-run-ovn\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.952353 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-cni-bin\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.952435 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ae78d89e-7970-49df-8839-b1b6d7de4ec1-env-overrides\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.952530 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-kubelet\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.952563 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-run-netns\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.952594 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-systemd-units\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.952660 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-log-socket\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.952699 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.952744 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-run-openvswitch\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.952779 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-cni-netd\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.952809 4755 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ae78d89e-7970-49df-8839-b1b6d7de4ec1-ovnkube-script-lib\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.952920 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-var-lib-openvswitch\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.954486 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ae78d89e-7970-49df-8839-b1b6d7de4ec1-ovn-node-metrics-cert\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.954656 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-run-systemd\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.955099 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ae78d89e-7970-49df-8839-b1b6d7de4ec1-ovnkube-config\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.955135 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-etc-openvswitch\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.955165 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rw4t5\" (UniqueName: \"kubernetes.io/projected/ae78d89e-7970-49df-8839-b1b6d7de4ec1-kube-api-access-rw4t5\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.955223 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-node-log\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.966400 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.977901 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.990131 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:31 crc kubenswrapper[4755]: I0202 22:34:31.999478 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:31Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.002590 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 01:46:53.638966416 +0000 UTC Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.009478 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.024673 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.032545 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.032586 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.032601 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.032622 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.032637 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:32Z","lastTransitionTime":"2026-02-02T22:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.038749 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\
\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastSt
ate\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.050746 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.060938 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-run-systemd\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.060981 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ae78d89e-7970-49df-8839-b1b6d7de4ec1-ovnkube-config\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.061002 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-etc-openvswitch\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.061020 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rw4t5\" (UniqueName: \"kubernetes.io/projected/ae78d89e-7970-49df-8839-b1b6d7de4ec1-kube-api-access-rw4t5\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.061054 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-node-log\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.061075 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-run-ovn-kubernetes\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.061103 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-slash\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.061120 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-run-ovn\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.061140 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-cni-bin\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.061158 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/ae78d89e-7970-49df-8839-b1b6d7de4ec1-env-overrides\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.061183 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-kubelet\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.061200 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-run-netns\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.061221 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-systemd-units\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.061252 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-log-socket\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.061273 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.061293 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-run-openvswitch\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.061313 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-cni-netd\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.061332 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ae78d89e-7970-49df-8839-b1b6d7de4ec1-ovnkube-script-lib\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.061351 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-var-lib-openvswitch\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.061369 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ae78d89e-7970-49df-8839-b1b6d7de4ec1-ovn-node-metrics-cert\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.063362 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.063397 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-run-netns\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.063409 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-run-systemd\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.063372 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ae78d89e-7970-49df-8839-b1b6d7de4ec1-env-overrides\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.063452 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-cni-netd\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.063478 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-etc-openvswitch\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.063476 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-kubelet\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.063477 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-run-ovn-kubernetes\") pod 
\"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.063500 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-systemd-units\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.063515 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-log-socket\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.063522 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-slash\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.063548 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-cni-bin\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.063751 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-node-log\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.063785 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-var-lib-openvswitch\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.063804 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-run-openvswitch\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.063821 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-run-ovn\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.065675 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ae78d89e-7970-49df-8839-b1b6d7de4ec1-ovnkube-script-lib\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.066001 4755 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ae78d89e-7970-49df-8839-b1b6d7de4ec1-ovnkube-config\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.067273 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ae78d89e-7970-49df-8839-b1b6d7de4ec1-ovn-node-metrics-cert\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.067673 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.068055 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:34:32 crc kubenswrapper[4755]: E0202 22:34:32.068152 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.068429 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:32 crc kubenswrapper[4755]: E0202 22:34:32.068492 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.068549 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:34:32 crc kubenswrapper[4755]: E0202 22:34:32.068606 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.085345 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rw4t5\" (UniqueName: \"kubernetes.io/projected/ae78d89e-7970-49df-8839-b1b6d7de4ec1-kube-api-access-rw4t5\") pod \"ovnkube-node-4mblb\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.096792 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/cert
s\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.134434 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.135783 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.135850 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 
22:34:32.135864 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.135904 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.135917 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:32Z","lastTransitionTime":"2026-02-02T22:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.238593 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.238957 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.238968 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.239004 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.239014 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:32Z","lastTransitionTime":"2026-02-02T22:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.260830 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:32 crc kubenswrapper[4755]: W0202 22:34:32.274694 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podae78d89e_7970_49df_8839_b1b6d7de4ec1.slice/crio-59354c8d6573fdee55706a130865ae6220464a942d3779cd9dea9509181468e9 WatchSource:0}: Error finding container 59354c8d6573fdee55706a130865ae6220464a942d3779cd9dea9509181468e9: Status 404 returned error can't find the container with id 59354c8d6573fdee55706a130865ae6220464a942d3779cd9dea9509181468e9 Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.286365 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-rdh9w" event={"ID":"96c13e34-9c0d-4838-8353-f4ebd83ecf06","Type":"ContainerStarted","Data":"d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94"} Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.286414 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-rdh9w" event={"ID":"96c13e34-9c0d-4838-8353-f4ebd83ecf06","Type":"ContainerStarted","Data":"e0517aa8f0a2f16016cc20290ce17bd2fa8ec65d3405189361735a0fade1a7ee"} Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.288217 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerStarted","Data":"59354c8d6573fdee55706a130865ae6220464a942d3779cd9dea9509181468e9"} Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.295229 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" event={"ID":"d9c0bf0a-73a9-42db-8ae6-716f712c0701","Type":"ContainerStarted","Data":"40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf"} Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.295295 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" event={"ID":"d9c0bf0a-73a9-42db-8ae6-716f712c0701","Type":"ContainerStarted","Data":"66187355544a6ed392925311860fbf0d53534d5c26b430b07eb987683251e85f"} Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.300443 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerStarted","Data":"7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2"} Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.300522 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerStarted","Data":"b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487"} Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.300545 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerStarted","Data":"caf582560956e61f0bd1f2e655e39275dd3c39a093f8655baf59295b297d5be2"} Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.302348 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.303342 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5fdlw" event={"ID":"c206b6fd-200d-47ea-88a5-453f3093c749","Type":"ContainerStarted","Data":"fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575"} Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.303382 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5fdlw" event={"ID":"c206b6fd-200d-47ea-88a5-453f3093c749","Type":"ContainerStarted","Data":"ec8e70fe895f8e0e96c0e6358f8f5d16ed0df8fca6d75c7db2b6dda5536ad6cb"} Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.316592 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.326431 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.340714 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.340759 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.340771 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.340785 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.340795 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:32Z","lastTransitionTime":"2026-02-02T22:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.341544 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.356670 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.370834 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.386831 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.400713 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.415100 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.426510 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.442078 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.442844 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.442898 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.442917 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:32 crc kubenswrapper[4755]: 
I0202 22:34:32.442942 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.442959 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:32Z","lastTransitionTime":"2026-02-02T22:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.461686 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.475535 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.488869 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.507520 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.521943 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.534309 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.545708 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.545953 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.546023 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.546093 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.546162 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:32Z","lastTransitionTime":"2026-02-02T22:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.547760 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.562468 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.576061 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.593213 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.607474 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":
\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.624219 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.640942 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.648578 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.648844 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.648952 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.649037 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.649168 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:32Z","lastTransitionTime":"2026-02-02T22:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.751675 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.751773 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.751793 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.751818 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.751838 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:32Z","lastTransitionTime":"2026-02-02T22:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.854514 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.854553 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.854565 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.854582 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.854593 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:32Z","lastTransitionTime":"2026-02-02T22:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.902450 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.903142 4755 scope.go:117] "RemoveContainer" containerID="e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641" Feb 02 22:34:32 crc kubenswrapper[4755]: E0202 22:34:32.903311 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.957388 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.957449 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.957470 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.957490 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:32 crc kubenswrapper[4755]: I0202 22:34:32.957501 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:32Z","lastTransitionTime":"2026-02-02T22:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.003333 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 11:59:43.997673333 +0000 UTC Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.059550 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.059609 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.059624 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.059643 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.059655 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:33Z","lastTransitionTime":"2026-02-02T22:34:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.162514 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.162562 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.162574 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.162597 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.162610 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:33Z","lastTransitionTime":"2026-02-02T22:34:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.265196 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.265499 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.265512 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.265530 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.265542 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:33Z","lastTransitionTime":"2026-02-02T22:34:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.308528 4755 generic.go:334] "Generic (PLEG): container finished" podID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerID="ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049" exitCode=0 Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.308611 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerDied","Data":"ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049"} Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.311718 4755 generic.go:334] "Generic (PLEG): container finished" podID="d9c0bf0a-73a9-42db-8ae6-716f712c0701" containerID="40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf" exitCode=0 Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.311790 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" event={"ID":"d9c0bf0a-73a9-42db-8ae6-716f712c0701","Type":"ContainerDied","Data":"40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf"} Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.331137 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc 
kubenswrapper[4755]: I0202 22:34:33.348421 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.360575 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.370676 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.370719 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.370764 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.370787 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.370802 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:33Z","lastTransitionTime":"2026-02-02T22:34:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.384535 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z 
is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.400042 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.418605 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.431353 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.449525 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.463318 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.481722 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.482171 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.482213 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.482224 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.482240 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.482252 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:33Z","lastTransitionTime":"2026-02-02T22:34:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.497230 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.509408 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":
\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.520489 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.529757 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.544609 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.569144 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.580682 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.585137 4755 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.585450 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.585466 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.585484 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.585496 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:33Z","lastTransitionTime":"2026-02-02T22:34:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.591469 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.607780 4755 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.620484 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.633010 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.647559 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.661572 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc 
kubenswrapper[4755]: I0202 22:34:33.687837 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.687868 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.687879 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.687894 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.687906 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:33Z","lastTransitionTime":"2026-02-02T22:34:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.688808 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:33Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.777924 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.778027 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.778062 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: 
\"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.778087 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.778111 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:33 crc kubenswrapper[4755]: E0202 22:34:33.778214 4755 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 22:34:33 crc kubenswrapper[4755]: E0202 22:34:33.778272 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:41.778257421 +0000 UTC m=+37.469477767 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 22:34:33 crc kubenswrapper[4755]: E0202 22:34:33.778337 4755 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 22:34:33 crc kubenswrapper[4755]: E0202 22:34:33.778356 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 22:34:33 crc kubenswrapper[4755]: E0202 22:34:33.778388 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 22:34:33 crc kubenswrapper[4755]: E0202 22:34:33.778405 4755 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 22:34:33 crc kubenswrapper[4755]: E0202 22:34:33.778428 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-02-02 22:34:41.778397155 +0000 UTC m=+37.469617491 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:34:33 crc kubenswrapper[4755]: E0202 22:34:33.778355 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 22:34:33 crc kubenswrapper[4755]: E0202 22:34:33.778464 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 22:34:33 crc kubenswrapper[4755]: E0202 22:34:33.778468 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:41.778452516 +0000 UTC m=+37.469672852 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 22:34:33 crc kubenswrapper[4755]: E0202 22:34:33.778474 4755 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 22:34:33 crc kubenswrapper[4755]: E0202 22:34:33.778487 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:41.778478617 +0000 UTC m=+37.469698953 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 22:34:33 crc kubenswrapper[4755]: E0202 22:34:33.778513 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:41.778494648 +0000 UTC m=+37.469714994 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.790486 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.790537 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.790549 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.790569 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.790584 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:33Z","lastTransitionTime":"2026-02-02T22:34:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.892705 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.892960 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.892978 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.892992 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.893002 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:33Z","lastTransitionTime":"2026-02-02T22:34:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.994920 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.994959 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.994969 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.994986 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:33 crc kubenswrapper[4755]: I0202 22:34:33.994998 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:33Z","lastTransitionTime":"2026-02-02T22:34:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.003512 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 13:42:11.520746469 +0000 UTC Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.068443 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.068484 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:34 crc kubenswrapper[4755]: E0202 22:34:34.068546 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:34:34 crc kubenswrapper[4755]: E0202 22:34:34.068604 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.068489 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:34:34 crc kubenswrapper[4755]: E0202 22:34:34.068678 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.096838 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.096869 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.096879 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.096893 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.096902 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:34Z","lastTransitionTime":"2026-02-02T22:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.198926 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.198966 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.198980 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.198998 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.199009 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:34Z","lastTransitionTime":"2026-02-02T22:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.302162 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.302205 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.302215 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.302230 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.302240 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:34Z","lastTransitionTime":"2026-02-02T22:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.324406 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerStarted","Data":"a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca"} Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.324449 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerStarted","Data":"f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984"} Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.324459 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerStarted","Data":"e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337"} Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.324469 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerStarted","Data":"11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c"} Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.324478 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerStarted","Data":"5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc"} Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.324487 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerStarted","Data":"51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368"} Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.327099 4755 generic.go:334] "Generic (PLEG): container finished" podID="d9c0bf0a-73a9-42db-8ae6-716f712c0701" containerID="3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd" exitCode=0 Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.327223 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/multus-additional-cni-plugins-pgbrf" event={"ID":"d9c0bf0a-73a9-42db-8ae6-716f712c0701","Type":"ContainerDied","Data":"3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd"} Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.348018 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.357242 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.369294 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.385163 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.396171 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.405823 4755 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.405861 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.405870 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.405885 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.405919 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:34Z","lastTransitionTime":"2026-02-02T22:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.406939 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.419843 4755 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.431164 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.445910 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.459034 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.472940 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-
02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.486533 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift
-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.509105 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.509159 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.509179 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.509202 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 
02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.509222 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:34Z","lastTransitionTime":"2026-02-02T22:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.513891 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-4j9p7"] Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.515308 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-4j9p7" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.517620 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.517635 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.517747 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.519071 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.531595 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.545021 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.562883 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.586631 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/11c7fccd-43ce-4bdf-9c2a-303a76526672-serviceca\") pod \"node-ca-4j9p7\" (UID: \"11c7fccd-43ce-4bdf-9c2a-303a76526672\") " pod="openshift-image-registry/node-ca-4j9p7" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.586764 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/11c7fccd-43ce-4bdf-9c2a-303a76526672-host\") pod \"node-ca-4j9p7\" (UID: \"11c7fccd-43ce-4bdf-9c2a-303a76526672\") " pod="openshift-image-registry/node-ca-4j9p7" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.586809 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zd54h\" (UniqueName: \"kubernetes.io/projected/11c7fccd-43ce-4bdf-9c2a-303a76526672-kube-api-access-zd54h\") pod \"node-ca-4j9p7\" (UID: \"11c7fccd-43ce-4bdf-9c2a-303a76526672\") " pod="openshift-image-registry/node-ca-4j9p7" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.588760 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z 
is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.603039 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.612416 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.612455 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.612464 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.612481 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.612490 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:34Z","lastTransitionTime":"2026-02-02T22:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.616824 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.632425 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.645140 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.659551 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.673994 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.687380 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/11c7fccd-43ce-4bdf-9c2a-303a76526672-host\") pod \"node-ca-4j9p7\" (UID: \"11c7fccd-43ce-4bdf-9c2a-303a76526672\") " pod="openshift-image-registry/node-ca-4j9p7" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.687457 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zd54h\" (UniqueName: \"kubernetes.io/projected/11c7fccd-43ce-4bdf-9c2a-303a76526672-kube-api-access-zd54h\") pod \"node-ca-4j9p7\" (UID: \"11c7fccd-43ce-4bdf-9c2a-303a76526672\") " pod="openshift-image-registry/node-ca-4j9p7" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.687530 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/11c7fccd-43ce-4bdf-9c2a-303a76526672-host\") pod \"node-ca-4j9p7\" (UID: \"11c7fccd-43ce-4bdf-9c2a-303a76526672\") " pod="openshift-image-registry/node-ca-4j9p7" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.687538 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/11c7fccd-43ce-4bdf-9c2a-303a76526672-serviceca\") pod \"node-ca-4j9p7\" (UID: \"11c7fccd-43ce-4bdf-9c2a-303a76526672\") " pod="openshift-image-registry/node-ca-4j9p7" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.689335 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.690440 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/11c7fccd-43ce-4bdf-9c2a-303a76526672-serviceca\") pod \"node-ca-4j9p7\" (UID: \"11c7fccd-43ce-4bdf-9c2a-303a76526672\") " pod="openshift-image-registry/node-ca-4j9p7" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.710243 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zd54h\" (UniqueName: \"kubernetes.io/projected/11c7fccd-43ce-4bdf-9c2a-303a76526672-kube-api-access-zd54h\") pod \"node-ca-4j9p7\" (UID: \"11c7fccd-43ce-4bdf-9c2a-303a76526672\") " pod="openshift-image-registry/node-ca-4j9p7" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.711008 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.714963 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.714997 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.715008 4755 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.715024 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.715035 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:34Z","lastTransitionTime":"2026-02-02T22:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.733810 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-
02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:34Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.818072 4755 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Feb 02 22:34:34 crc kubenswrapper[4755]: W0202 22:34:34.820566 4755 reflector.go:484] object-"openshift-image-registry"/"node-ca-dockercfg-4777p": watch of *v1.Secret ended with: very short watch: object-"openshift-image-registry"/"node-ca-dockercfg-4777p": Unexpected watch close - watch lasted less than a second and no items received Feb 02 22:34:34 crc kubenswrapper[4755]: W0202 22:34:34.821303 4755 reflector.go:484] object-"openshift-image-registry"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-image-registry"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Feb 02 22:34:34 crc kubenswrapper[4755]: W0202 22:34:34.821321 4755 reflector.go:484] object-"openshift-image-registry"/"image-registry-certificates": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-image-registry"/"image-registry-certificates": Unexpected watch close - watch lasted less than a second and no items received Feb 02 22:34:34 crc kubenswrapper[4755]: W0202 22:34:34.821930 4755 reflector.go:484] object-"openshift-image-registry"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-image-registry"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.824322 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.824360 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.824371 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.824386 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.824397 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:34Z","lastTransitionTime":"2026-02-02T22:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.841979 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-4j9p7" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.927279 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.927505 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.927513 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.927526 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:34 crc kubenswrapper[4755]: I0202 22:34:34.927535 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:34Z","lastTransitionTime":"2026-02-02T22:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.004147 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 05:34:31.600264373 +0000 UTC Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.032839 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.032879 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.032891 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.032907 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.032919 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:35Z","lastTransitionTime":"2026-02-02T22:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.081926 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.095321 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.113403 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.133663 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.135252 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.135279 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.135288 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.135302 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.135315 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:35Z","lastTransitionTime":"2026-02-02T22:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.145819 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.157419 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.169324 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.183764 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.196814 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.209134 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.222301 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.238830 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.238873 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.238884 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.238902 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.238914 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:35Z","lastTransitionTime":"2026-02-02T22:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.239031 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\
\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.253509 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.334021 4755 generic.go:334] "Generic (PLEG): container finished" podID="d9c0bf0a-73a9-42db-8ae6-716f712c0701" containerID="6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd" exitCode=0 Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.334141 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" event={"ID":"d9c0bf0a-73a9-42db-8ae6-716f712c0701","Type":"ContainerDied","Data":"6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd"} Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.337215 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-4j9p7" event={"ID":"11c7fccd-43ce-4bdf-9c2a-303a76526672","Type":"ContainerStarted","Data":"03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a"} Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.337253 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-image-registry/node-ca-4j9p7" event={"ID":"11c7fccd-43ce-4bdf-9c2a-303a76526672","Type":"ContainerStarted","Data":"1b35e92a815e1e0dbbc7ab700c4997e8a62ba7cf961bad1fbb4f1cb648464d13"} Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.342780 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.342827 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.342845 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.342869 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.342885 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:35Z","lastTransitionTime":"2026-02-02T22:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.352831 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.366922 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.386112 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.404675 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.419030 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.430773 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.441230 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.445789 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.445825 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.445837 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.445855 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.445867 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:35Z","lastTransitionTime":"2026-02-02T22:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.455588 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.474882 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z 
is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.486399 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.499608 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.514521 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.524652 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.535655 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.548427 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.548513 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.548526 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.548545 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.548558 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:35Z","lastTransitionTime":"2026-02-02T22:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.549168 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.564254 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.584806 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.627812 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.651131 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.651160 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.651168 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.651181 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.651190 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:35Z","lastTransitionTime":"2026-02-02T22:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.663924 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.705990 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.745346 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.754068 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.754167 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.754182 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.754195 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.754204 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:35Z","lastTransitionTime":"2026-02-02T22:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.785813 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.824897 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.857442 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.857481 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.857492 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.857507 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.857517 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:35Z","lastTransitionTime":"2026-02-02T22:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.868078 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.920102 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z 
is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.951125 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.960573 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.960632 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.960651 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.960676 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:35 crc kubenswrapper[4755]: I0202 22:34:35.960695 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:35Z","lastTransitionTime":"2026-02-02T22:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.005131 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 13:52:22.413633249 +0000 UTC Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.031531 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.064380 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.064434 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.064452 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.064476 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.064493 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:36Z","lastTransitionTime":"2026-02-02T22:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.068833 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.068872 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.068833 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:34:36 crc kubenswrapper[4755]: E0202 22:34:36.069035 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:34:36 crc kubenswrapper[4755]: E0202 22:34:36.069111 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:34:36 crc kubenswrapper[4755]: E0202 22:34:36.069232 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.167901 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.167958 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.167976 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.168002 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.168018 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:36Z","lastTransitionTime":"2026-02-02T22:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.190574 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.243818 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.272218 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.272295 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.272314 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.272338 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.272357 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:36Z","lastTransitionTime":"2026-02-02T22:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.322268 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.348302 4755 generic.go:334] "Generic (PLEG): container finished" podID="d9c0bf0a-73a9-42db-8ae6-716f712c0701" containerID="1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c" exitCode=0 Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.348442 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" event={"ID":"d9c0bf0a-73a9-42db-8ae6-716f712c0701","Type":"ContainerDied","Data":"1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c"} Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.362092 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerStarted","Data":"13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d"} Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.374055 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:36Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.374676 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.374757 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.374776 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.374797 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.374811 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:36Z","lastTransitionTime":"2026-02-02T22:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.387184 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:36Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.401476 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:36Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.416069 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:36Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.428419 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:36Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.444144 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:36Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.457422 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\"
:\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:36Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.469915 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:36Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.478018 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.478100 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.478122 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.478141 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.478191 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:36Z","lastTransitionTime":"2026-02-02T22:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.480826 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:36Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.489321 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:36Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.502226 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:36Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.521058 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:36Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.549489 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:36Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.580144 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.580192 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.580205 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.580222 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.580235 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:36Z","lastTransitionTime":"2026-02-02T22:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.682899 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.682967 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.682991 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.683025 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.683046 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:36Z","lastTransitionTime":"2026-02-02T22:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.786358 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.786406 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.786418 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.786437 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.786449 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:36Z","lastTransitionTime":"2026-02-02T22:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.888309 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.888349 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.888360 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.888376 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.888385 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:36Z","lastTransitionTime":"2026-02-02T22:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.995025 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.995054 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.995063 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.995077 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:36 crc kubenswrapper[4755]: I0202 22:34:36.995301 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:36Z","lastTransitionTime":"2026-02-02T22:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.006281 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 10:06:39.593081674 +0000 UTC Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.098482 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.098518 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.098551 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.098567 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.098582 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:37Z","lastTransitionTime":"2026-02-02T22:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.201488 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.201526 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.201539 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.201555 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.201600 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:37Z","lastTransitionTime":"2026-02-02T22:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.305716 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.305814 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.305833 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.305859 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.305877 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:37Z","lastTransitionTime":"2026-02-02T22:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.367931 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" event={"ID":"d9c0bf0a-73a9-42db-8ae6-716f712c0701","Type":"ContainerStarted","Data":"6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d"} Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.382126 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:37Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.394532 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:37Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.406142 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:37Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.409261 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.409304 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.409318 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.409337 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.409349 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:37Z","lastTransitionTime":"2026-02-02T22:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.419689 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:37Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.440463 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:37Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.456922 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:37Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.477071 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\"
:\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:37Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.496634 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:37Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.511983 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.512025 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.512037 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.512056 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.512069 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:37Z","lastTransitionTime":"2026-02-02T22:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.512610 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:37Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.525354 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:37Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.540226 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:37Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.563343 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:37Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.578480 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:37Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.614722 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.614835 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.614861 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.614893 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.614912 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:37Z","lastTransitionTime":"2026-02-02T22:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.717788 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.717841 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.717853 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.717871 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.717885 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:37Z","lastTransitionTime":"2026-02-02T22:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.823203 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.823275 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.823293 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.823318 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.823335 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:37Z","lastTransitionTime":"2026-02-02T22:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.925652 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.925714 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.925759 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.925786 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:37 crc kubenswrapper[4755]: I0202 22:34:37.925802 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:37Z","lastTransitionTime":"2026-02-02T22:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
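The NodeNotReady churn above reduces to one condition: the runtime reports NetworkReady=false because /etc/kubernetes/cni/net.d/ contains no CNI network configuration yet (OVN-Kubernetes has not written one). Below is a sketch of a check with the same shape, scanning the conf directory for .conf/.conflist/.json entries; it is illustrative only, not the kubelet's actual code path.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// networkReady mimics the shape of the check behind the log's
// "no CNI configuration file in /etc/kubernetes/cni/net.d/" message:
// the node's network is considered ready only once at least one CNI
// network config file exists in the configured conf directory.
func networkReady(confDir string) (bool, error) {
	entries, err := os.ReadDir(confDir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, fmt.Errorf("no CNI configuration file in %s. Has your network provider started?", confDir)
}

func main() {
	ok, err := networkReady("/etc/kubernetes/cni/net.d") // directory from the log
	fmt.Println(ok, err)
}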
Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.006919 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 08:05:57.060055369 +0000 UTC
Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.027926 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.028021 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.028043 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.028068 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.028088 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:38Z","lastTransitionTime":"2026-02-02T22:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.068525 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.068614 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.068662 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 22:34:38 crc kubenswrapper[4755]: E0202 22:34:38.068849 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 22:34:38 crc kubenswrapper[4755]: E0202 22:34:38.068971 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 22:34:38 crc kubenswrapper[4755]: E0202 22:34:38.069110 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
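The certificate_manager.go record above pairs an expiration of 2026-02-24 05:53:03 UTC with a rotation deadline of 2025-11-19, months earlier. A gap like that is expected: the client-go certificate manager schedules rotation at a randomized point late in the certificate's validity window, roughly 70 to 90 percent of its lifetime. The exact jitter fraction and the issue time used below are assumptions for illustration.

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline sketches how a deadline like the one in the
// certificate_manager.go line can land well before expiry: pick a
// random point at roughly 70-90% of the validity period (the jitter
// fraction here is an assumption, not the verified upstream constant).
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	notBefore := time.Date(2025, time.February, 24, 5, 53, 3, 0, time.UTC) // hypothetical issue time
	notAfter := time.Date(2026, time.February, 24, 5, 53, 3, 0, time.UTC)  // expiry from the log
	fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter))
}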
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.131236 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.131284 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.131298 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.131318 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.131335 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:38Z","lastTransitionTime":"2026-02-02T22:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.235365 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.235437 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.235457 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.235484 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.235501 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:38Z","lastTransitionTime":"2026-02-02T22:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.338526 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.338563 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.338575 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.338592 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.338604 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:38Z","lastTransitionTime":"2026-02-02T22:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.374324 4755 generic.go:334] "Generic (PLEG): container finished" podID="d9c0bf0a-73a9-42db-8ae6-716f712c0701" containerID="6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d" exitCode=0
Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.374378 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" event={"ID":"d9c0bf0a-73a9-42db-8ae6-716f712c0701","Type":"ContainerDied","Data":"6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d"}
Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.400568 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:38Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.418586 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:38Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.438352 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:38Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.448763 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.448802 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.448819 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.448840 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.448855 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:38Z","lastTransitionTime":"2026-02-02T22:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.464051 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:38Z 
is after 2025-08-24T17:21:41Z" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.479061 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:38Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.494629 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:38Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.508061 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:38Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.521320 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:38Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.536382 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:38Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.550302 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:38Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.552557 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.552593 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.552602 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.552616 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.552626 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:38Z","lastTransitionTime":"2026-02-02T22:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.564778 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:38Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.579156 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:38Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.598659 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mo
untPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:38Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.655089 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.655130 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.655144 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.655160 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.655170 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:38Z","lastTransitionTime":"2026-02-02T22:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.757445 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.757504 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.757524 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.757550 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.757569 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:38Z","lastTransitionTime":"2026-02-02T22:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.859874 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.859944 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.859969 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.860002 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.860029 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:38Z","lastTransitionTime":"2026-02-02T22:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.962998 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.963037 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.963063 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.963078 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:38 crc kubenswrapper[4755]: I0202 22:34:38.963087 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:38Z","lastTransitionTime":"2026-02-02T22:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.007966 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 14:04:31.190437975 +0000 UTC Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.065144 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.065179 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.065189 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.065203 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.065211 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:39Z","lastTransitionTime":"2026-02-02T22:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.167528 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.167576 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.167589 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.167607 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.167620 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:39Z","lastTransitionTime":"2026-02-02T22:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.270051 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.270110 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.270130 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.270154 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.270173 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:39Z","lastTransitionTime":"2026-02-02T22:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.373480 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.373527 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.373540 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.373557 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.373571 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:39Z","lastTransitionTime":"2026-02-02T22:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.384121 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerStarted","Data":"82aa37aadae597f97724a4f33d2a44cc546d1dd272db0e1652c32dea9a6dd16f"} Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.384447 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.384496 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.390311 4755 generic.go:334] "Generic (PLEG): container finished" podID="d9c0bf0a-73a9-42db-8ae6-716f712c0701" containerID="87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d" exitCode=0 Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.390368 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" event={"ID":"d9c0bf0a-73a9-42db-8ae6-716f712c0701","Type":"ContainerDied","Data":"87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d"} Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.412587 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.428761 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.430403 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.452598 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.475925 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.475999 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.476023 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.476054 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.476076 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:39Z","lastTransitionTime":"2026-02-02T22:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.480398 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82aa37aadae597f97724a4f33d2a44cc546d1dd2
72db0e1652c32dea9a6dd16f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccou
nt\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.496675 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.515887 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.533558 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.548499 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.570711 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256
:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.580008 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.580058 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.580071 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.580092 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 
02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.580106 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:39Z","lastTransitionTime":"2026-02-02T22:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.589567 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.602749 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.618274 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.635541 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.651054 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.665523 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.679200 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.683310 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.683361 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.683379 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.683402 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.683422 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:39Z","lastTransitionTime":"2026-02-02T22:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.691373 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.711512 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.728706 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.746853 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.764857 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.786337 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.786438 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.786456 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.786478 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.786493 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:39Z","lastTransitionTime":"2026-02-02T22:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.787864 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010
e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\
\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.804260 4755 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.818929 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.835906 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.864868 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cr
i-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82aa37aadae597f97724a4f33d2a44cc546d1dd272db0e1652c32dea9a6dd16f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"
mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:39Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.889660 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.889764 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.889786 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.889811 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.889829 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:39Z","lastTransitionTime":"2026-02-02T22:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.993319 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.993394 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.993414 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.993439 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:39 crc kubenswrapper[4755]: I0202 22:34:39.993456 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:39Z","lastTransitionTime":"2026-02-02T22:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.008861 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 16:05:06.682378867 +0000 UTC Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.068567 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.068634 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.068634 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:34:40 crc kubenswrapper[4755]: E0202 22:34:40.068969 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:34:40 crc kubenswrapper[4755]: E0202 22:34:40.069195 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:34:40 crc kubenswrapper[4755]: E0202 22:34:40.069319 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.095883 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.095955 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.095979 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.096009 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.096030 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:40Z","lastTransitionTime":"2026-02-02T22:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.199020 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.199091 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.199111 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.199135 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.199153 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:40Z","lastTransitionTime":"2026-02-02T22:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.302495 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.302562 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.302581 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.302607 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.302625 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:40Z","lastTransitionTime":"2026-02-02T22:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.400365 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" event={"ID":"d9c0bf0a-73a9-42db-8ae6-716f712c0701","Type":"ContainerStarted","Data":"6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923"} Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.400984 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.406009 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.406071 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.406093 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.406119 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.406137 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:40Z","lastTransitionTime":"2026-02-02T22:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.426014 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.436257 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.444042 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.465404 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.498660 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cr
i-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82aa37aadae597f97724a4f33d2a44cc546d1dd272db0e1652c32dea9a6dd16f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"
mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.510891 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.510969 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.510990 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.511020 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.511041 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:40Z","lastTransitionTime":"2026-02-02T22:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.521106 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.541125 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.561941 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.579188 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.599968 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.614389 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.614459 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.614483 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.614510 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.614528 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:40Z","lastTransitionTime":"2026-02-02T22:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.622772 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.644446 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.670645 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.690935 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/opens
hift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true
,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.708158 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.716513 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.716551 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.716564 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.716580 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.716591 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:40Z","lastTransitionTime":"2026-02-02T22:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.723300 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.738769 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.759079 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.782497 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/opens
hift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true
,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.803159 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.819274 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 
22:34:40.819326 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.819343 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.819363 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.819381 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:40Z","lastTransitionTime":"2026-02-02T22:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.820615 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.837204 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\
\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.871941 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\
\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"
run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82aa37aadae597f97724a4f33d2a44cc546d1dd272db0e1652c32dea9a6dd16f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPat
h\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current 
time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.891895 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.912184 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.922997 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.923052 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.923069 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.923092 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.923108 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:40Z","lastTransitionTime":"2026-02-02T22:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.934321 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.937115 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.937194 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.937214 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.937709 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.937801 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:40Z","lastTransitionTime":"2026-02-02T22:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.953167 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168
.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: E0202 22:34:40.959526 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:40Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.965432 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.965485 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.965504 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.965569 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:40 crc kubenswrapper[4755]: I0202 22:34:40.965588 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:40Z","lastTransitionTime":"2026-02-02T22:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:41 crc kubenswrapper[4755]: E0202 22:34:41.006862 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:41Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.009813 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 11:01:00.672437741 +0000 UTC Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.013163 4755 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.013280 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.013521 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.013841 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.013936 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:41Z","lastTransitionTime":"2026-02-02T22:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:41 crc kubenswrapper[4755]: E0202 22:34:41.029119 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:41Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.033850 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.033895 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.033909 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.033928 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.033944 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:41Z","lastTransitionTime":"2026-02-02T22:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:41 crc kubenswrapper[4755]: E0202 22:34:41.054097 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:41Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.059039 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.059102 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.059143 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.059172 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.059192 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:41Z","lastTransitionTime":"2026-02-02T22:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:41 crc kubenswrapper[4755]: E0202 22:34:41.087891 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:41Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:41 crc kubenswrapper[4755]: E0202 22:34:41.088111 4755 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.090501 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.090565 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.090586 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.090869 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.090907 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:41Z","lastTransitionTime":"2026-02-02T22:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.193363 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.193398 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.193410 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.193426 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.193437 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:41Z","lastTransitionTime":"2026-02-02T22:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.295852 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.295923 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.295945 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.295972 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.295992 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:41Z","lastTransitionTime":"2026-02-02T22:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.399097 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.399236 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.399260 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.399349 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.399367 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:41Z","lastTransitionTime":"2026-02-02T22:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.502061 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.502124 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.502141 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.502165 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.502184 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:41Z","lastTransitionTime":"2026-02-02T22:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.605191 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.605257 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.605275 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.605299 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.605315 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:41Z","lastTransitionTime":"2026-02-02T22:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.605315 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:41Z","lastTransitionTime":"2026-02-02T22:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.708809 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.708857 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.708870 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.708889 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.708902 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:41Z","lastTransitionTime":"2026-02-02T22:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.811879 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.811959 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.811979 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.812005 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.812026 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:41Z","lastTransitionTime":"2026-02-02T22:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.873864 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.874080 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 22:34:41 crc kubenswrapper[4755]: E0202 22:34:41.874109 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:34:57.874079434 +0000 UTC m=+53.565299770 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.874165 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.874220 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.874253 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 22:34:41 crc kubenswrapper[4755]: E0202 22:34:41.874278 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Feb 02 22:34:41 crc kubenswrapper[4755]: E0202 22:34:41.874308 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Feb 02 22:34:41 crc kubenswrapper[4755]: E0202 22:34:41.874329 4755 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 22:34:41 crc kubenswrapper[4755]: E0202 22:34:41.874350 4755 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Feb 02 22:34:41 crc kubenswrapper[4755]: E0202 22:34:41.874397 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:57.874375583 +0000 UTC m=+53.565595949 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 22:34:41 crc kubenswrapper[4755]: E0202 22:34:41.874447 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:57.874420714 +0000 UTC m=+53.565641050 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Feb 02 22:34:41 crc kubenswrapper[4755]: E0202 22:34:41.874452 4755 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Feb 02 22:34:41 crc kubenswrapper[4755]: E0202 22:34:41.874494 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:57.874485636 +0000 UTC m=+53.565705972 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Feb 02 22:34:41 crc kubenswrapper[4755]: E0202 22:34:41.874519 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Feb 02 22:34:41 crc kubenswrapper[4755]: E0202 22:34:41.874535 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Feb 02 22:34:41 crc kubenswrapper[4755]: E0202 22:34:41.874549 4755 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 22:34:41 crc kubenswrapper[4755]: E0202 22:34:41.874589 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:57.874579658 +0000 UTC m=+53.565799994 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.915150 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.915197 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.915212 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.915231 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:41 crc kubenswrapper[4755]: I0202 22:34:41.915247 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:41Z","lastTransitionTime":"2026-02-02T22:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.010913 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 08:26:00.401971115 +0000 UTC
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.018580 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.018665 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.018690 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.018721 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.018790 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:42Z","lastTransitionTime":"2026-02-02T22:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.068309 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.068408 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 22:34:42 crc kubenswrapper[4755]: E0202 22:34:42.068506 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.068529 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 22:34:42 crc kubenswrapper[4755]: E0202 22:34:42.068614 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 22:34:42 crc kubenswrapper[4755]: E0202 22:34:42.068720 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.122269 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.122326 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.122347 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.122370 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.122388 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:42Z","lastTransitionTime":"2026-02-02T22:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.224285 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.224635 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.224895 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.225102 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.225270 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:42Z","lastTransitionTime":"2026-02-02T22:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.328089 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.328135 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.328159 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.328182 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.328204 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:42Z","lastTransitionTime":"2026-02-02T22:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.413041 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4mblb_ae78d89e-7970-49df-8839-b1b6d7de4ec1/ovnkube-controller/0.log"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.417505 4755 generic.go:334] "Generic (PLEG): container finished" podID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerID="82aa37aadae597f97724a4f33d2a44cc546d1dd272db0e1652c32dea9a6dd16f" exitCode=1
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.417565 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerDied","Data":"82aa37aadae597f97724a4f33d2a44cc546d1dd272db0e1652c32dea9a6dd16f"}
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.418625 4755 scope.go:117] "RemoveContainer" containerID="82aa37aadae597f97724a4f33d2a44cc546d1dd272db0e1652c32dea9a6dd16f"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.433744 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.433780 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.433792 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.433808 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.433822 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:42Z","lastTransitionTime":"2026-02-02T22:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.441925 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:42Z is after 2025-08-24T17:21:41Z"
Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.477321 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82aa37aadae597f97724a4f33d2a44cc546d1dd272db0e1652c32dea9a6dd16f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82aa37aadae597f97724a4f33d2a44cc546d1dd272db0e1652c32dea9a6dd16f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"message\\\":\\\"202 22:34:41.782604 6042 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0202 22:34:41.782647 6042 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 22:34:41.782648 6042 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0202 22:34:41.782672 6042 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 22:34:41.782677 6042 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0202 22:34:41.782680 6042 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 22:34:41.782695 6042 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 22:34:41.782715 6042 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 22:34:41.782763 6042 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 22:34:41.782782 6042 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 22:34:41.782788 6042 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 22:34:41.782790 6042 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 22:34:41.782809 6042 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 22:34:41.782816 6042 factory.go:656] Stopping watch factory\\\\nI0202 22:34:41.782820 6042 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 22:34:41.782828 6042 ovnkube.go:599] Stopped ovnkube\\\\nI02\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:42Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:42Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.521182 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:42Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.536435 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.536524 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.536553 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.536588 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.536633 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:42Z","lastTransitionTime":"2026-02-02T22:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.538644 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:42Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.562186 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:42Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.575355 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:42Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.588193 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:42Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.612758 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:42Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.640329 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:42Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.642539 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.643294 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:42 crc 
kubenswrapper[4755]: I0202 22:34:42.643319 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.643632 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.643657 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:42Z","lastTransitionTime":"2026-02-02T22:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.658124 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:42Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.671229 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:42Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.683312 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:42Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.747031 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.747083 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.747101 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.747124 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.747141 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:42Z","lastTransitionTime":"2026-02-02T22:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.849948 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.850002 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.850019 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.850041 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.850058 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:42Z","lastTransitionTime":"2026-02-02T22:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.953157 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.953196 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.953206 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.953221 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:42 crc kubenswrapper[4755]: I0202 22:34:42.953231 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:42Z","lastTransitionTime":"2026-02-02T22:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.012003 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 18:12:47.796020602 +0000 UTC Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.056546 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.056606 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.056625 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.056649 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.056672 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:43Z","lastTransitionTime":"2026-02-02T22:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.158657 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.158752 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.158766 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.158784 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.158796 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:43Z","lastTransitionTime":"2026-02-02T22:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.261537 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.261571 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.261582 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.261597 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.261607 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:43Z","lastTransitionTime":"2026-02-02T22:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.363965 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.364026 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.364045 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.364068 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.364087 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:43Z","lastTransitionTime":"2026-02-02T22:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.425663 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4mblb_ae78d89e-7970-49df-8839-b1b6d7de4ec1/ovnkube-controller/0.log" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.430026 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerStarted","Data":"33bcfe348b8d6076aef27610d63f553503678c9e7da145cc9fc2d9f93956e294"} Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.430659 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.453470 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.467252 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.467330 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.467343 4755 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.467375 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.467387 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:43Z","lastTransitionTime":"2026-02-02T22:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.469636 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.487502 4755 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-5fdlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.519808 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics
-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33bcfe348b8d6076aef27610d63f553503678c9e7da145cc9fc2d9f93956e294\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82aa37aadae597f97724a4f33d2a44cc546d1dd272db0e1652c32dea9a6dd16f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"message\\\":\\\"202 22:34:41.782604 6042 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0202 22:34:41.782647 6042 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 22:34:41.782648 6042 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0202 22:34:41.782672 6042 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 22:34:41.782677 6042 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0202 22:34:41.782680 6042 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 22:34:41.782695 6042 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 22:34:41.782715 6042 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 22:34:41.782763 6042 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 22:34:41.782782 6042 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 22:34:41.782788 6042 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 22:34:41.782790 6042 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 22:34:41.782809 6042 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 22:34:41.782816 6042 factory.go:656] Stopping watch factory\\\\nI0202 22:34:41.782820 6042 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 22:34:41.782828 6042 ovnkube.go:599] Stopped 
ovnkube\\\\nI02\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.534572 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.547071 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.560998 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.570117 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.570172 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.570185 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.570215 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.570228 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:43Z","lastTransitionTime":"2026-02-02T22:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.575652 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.589994 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc"] Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.590765 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.593125 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.594138 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.595271 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.611905 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.626373 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.639253 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.659974 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.1
26.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.673119 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.673163 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.673175 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.673195 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.673209 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:43Z","lastTransitionTime":"2026-02-02T22:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.675371 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.690080 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.692632 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8e5c9d46-6238-45de-b6ed-f633e92b3728-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-6ljzc\" (UID: \"8e5c9d46-6238-45de-b6ed-f633e92b3728\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.692724 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8e5c9d46-6238-45de-b6ed-f633e92b3728-env-overrides\") pod \"ovnkube-control-plane-749d76644c-6ljzc\" (UID: \"8e5c9d46-6238-45de-b6ed-f633e92b3728\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.692830 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8e5c9d46-6238-45de-b6ed-f633e92b3728-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-6ljzc\" (UID: \"8e5c9d46-6238-45de-b6ed-f633e92b3728\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.692917 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxztl\" (UniqueName: \"kubernetes.io/projected/8e5c9d46-6238-45de-b6ed-f633e92b3728-kube-api-access-vxztl\") pod \"ovnkube-control-plane-749d76644c-6ljzc\" (UID: \"8e5c9d46-6238-45de-b6ed-f633e92b3728\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.710932 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"na
me\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.732664 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33bcfe348b8d6076aef27610d63f553503678c9e
7da145cc9fc2d9f93956e294\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82aa37aadae597f97724a4f33d2a44cc546d1dd272db0e1652c32dea9a6dd16f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"message\\\":\\\"202 22:34:41.782604 6042 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0202 22:34:41.782647 6042 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 22:34:41.782648 6042 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0202 22:34:41.782672 6042 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 22:34:41.782677 6042 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0202 22:34:41.782680 6042 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 22:34:41.782695 6042 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 22:34:41.782715 6042 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 22:34:41.782763 6042 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 22:34:41.782782 6042 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 22:34:41.782788 6042 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 22:34:41.782790 6042 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 22:34:41.782809 6042 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 22:34:41.782816 6042 factory.go:656] Stopping watch factory\\\\nI0202 22:34:41.782820 6042 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 22:34:41.782828 6042 ovnkube.go:599] Stopped 
ovnkube\\\\nI02\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.744747 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.759857 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.775151 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.775577 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.775609 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.775623 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.775640 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.775652 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:43Z","lastTransitionTime":"2026-02-02T22:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.787414 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.794543 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8e5c9d46-6238-45de-b6ed-f633e92b3728-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-6ljzc\" (UID: \"8e5c9d46-6238-45de-b6ed-f633e92b3728\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.794600 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxztl\" (UniqueName: \"kubernetes.io/projected/8e5c9d46-6238-45de-b6ed-f633e92b3728-kube-api-access-vxztl\") pod \"ovnkube-control-plane-749d76644c-6ljzc\" (UID: \"8e5c9d46-6238-45de-b6ed-f633e92b3728\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.794688 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8e5c9d46-6238-45de-b6ed-f633e92b3728-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-6ljzc\" (UID: \"8e5c9d46-6238-45de-b6ed-f633e92b3728\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.794722 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8e5c9d46-6238-45de-b6ed-f633e92b3728-env-overrides\") pod \"ovnkube-control-plane-749d76644c-6ljzc\" (UID: \"8e5c9d46-6238-45de-b6ed-f633e92b3728\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.795526 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8e5c9d46-6238-45de-b6ed-f633e92b3728-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-6ljzc\" (UID: \"8e5c9d46-6238-45de-b6ed-f633e92b3728\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.795976 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8e5c9d46-6238-45de-b6ed-f633e92b3728-env-overrides\") pod \"ovnkube-control-plane-749d76644c-6ljzc\" (UID: \"8e5c9d46-6238-45de-b6ed-f633e92b3728\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.800944 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8e5c9d46-6238-45de-b6ed-f633e92b3728-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-6ljzc\" (UID: \"8e5c9d46-6238-45de-b6ed-f633e92b3728\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.808203 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.815605 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxztl\" (UniqueName: \"kubernetes.io/projected/8e5c9d46-6238-45de-b6ed-f633e92b3728-kube-api-access-vxztl\") pod \"ovnkube-control-plane-749d76644c-6ljzc\" (UID: \"8e5c9d46-6238-45de-b6ed-f633e92b3728\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.821343 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.837147 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.855301 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.875700 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/opens
hift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true
,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.878801 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.878845 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.878859 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.878878 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.878892 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:43Z","lastTransitionTime":"2026-02-02T22:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.898157 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e5c9d46-6238-45de-b6ed-f633e92b3728\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6ljzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2026-02-02T22:34:43Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.905424 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.992257 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.992326 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.992349 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.992377 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:43 crc kubenswrapper[4755]: I0202 22:34:43.992399 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:43Z","lastTransitionTime":"2026-02-02T22:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.013103 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 14:06:05.911682395 +0000 UTC Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.068807 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.068807 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:34:44 crc kubenswrapper[4755]: E0202 22:34:44.069051 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.069061 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:44 crc kubenswrapper[4755]: E0202 22:34:44.069256 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:34:44 crc kubenswrapper[4755]: E0202 22:34:44.069376 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.094867 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.094989 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.095011 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.095040 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.095060 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:44Z","lastTransitionTime":"2026-02-02T22:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.197912 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.197990 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.198016 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.198049 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.198102 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:44Z","lastTransitionTime":"2026-02-02T22:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.303246 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.303341 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.303366 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.303395 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.303422 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:44Z","lastTransitionTime":"2026-02-02T22:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.407309 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.407382 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.407397 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.407419 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.407435 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:44Z","lastTransitionTime":"2026-02-02T22:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.437305 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4mblb_ae78d89e-7970-49df-8839-b1b6d7de4ec1/ovnkube-controller/1.log" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.438075 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4mblb_ae78d89e-7970-49df-8839-b1b6d7de4ec1/ovnkube-controller/0.log" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.442558 4755 generic.go:334] "Generic (PLEG): container finished" podID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerID="33bcfe348b8d6076aef27610d63f553503678c9e7da145cc9fc2d9f93956e294" exitCode=1 Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.442602 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerDied","Data":"33bcfe348b8d6076aef27610d63f553503678c9e7da145cc9fc2d9f93956e294"} Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.442658 4755 scope.go:117] "RemoveContainer" containerID="82aa37aadae597f97724a4f33d2a44cc546d1dd272db0e1652c32dea9a6dd16f" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.444218 4755 scope.go:117] "RemoveContainer" containerID="33bcfe348b8d6076aef27610d63f553503678c9e7da145cc9fc2d9f93956e294" Feb 02 22:34:44 crc kubenswrapper[4755]: E0202 22:34:44.444558 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4mblb_openshift-ovn-kubernetes(ae78d89e-7970-49df-8839-b1b6d7de4ec1)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.444786 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" event={"ID":"8e5c9d46-6238-45de-b6ed-f633e92b3728","Type":"ContainerStarted","Data":"7aa1ae9141eb375e7a9daa30bed33432bee7b5e963bee04bc20e8f8da94a31d2"} Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.444830 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" event={"ID":"8e5c9d46-6238-45de-b6ed-f633e92b3728","Type":"ContainerStarted","Data":"2ac010b0ce13d96cc24e06b6c15daa4755ec27ad662a2a176991d79036ad72f5"} Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.467359 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:44Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.486662 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:44Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.509361 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/opens
hift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true
,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:44Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.512309 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.512364 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.512391 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.512422 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.512440 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:44Z","lastTransitionTime":"2026-02-02T22:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.532888 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:44Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.547183 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:44Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.561399 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e5c9d46-6238-45de-b6ed-f633e92b3728\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6ljzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:44Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.576013 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:44Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.599249 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:44Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.616018 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.616066 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.616078 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.616097 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.616110 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:44Z","lastTransitionTime":"2026-02-02T22:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.631757 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33bcfe348b8d6076aef27610d63f553503678c9e
7da145cc9fc2d9f93956e294\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82aa37aadae597f97724a4f33d2a44cc546d1dd272db0e1652c32dea9a6dd16f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"message\\\":\\\"202 22:34:41.782604 6042 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0202 22:34:41.782647 6042 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 22:34:41.782648 6042 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0202 22:34:41.782672 6042 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 22:34:41.782677 6042 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0202 22:34:41.782680 6042 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 22:34:41.782695 6042 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 22:34:41.782715 6042 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 22:34:41.782763 6042 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 22:34:41.782782 6042 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 22:34:41.782788 6042 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 22:34:41.782790 6042 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 22:34:41.782809 6042 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 22:34:41.782816 6042 factory.go:656] Stopping watch factory\\\\nI0202 22:34:41.782820 6042 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 22:34:41.782828 6042 ovnkube.go:599] Stopped ovnkube\\\\nI02\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33bcfe348b8d6076aef27610d63f553503678c9e7da145cc9fc2d9f93956e294\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:34:44Z\\\",\\\"message\\\":\\\"ecycle-manager/olm-operator-metrics]} name:Service_openshift-operator-lifecycle-manager/olm-operator-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.168:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {63b1440a-0908-4cab-8799-012fa1cf0b07}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 22:34:43.693285 6210 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver-operator/metrics]} name:Service_openshift-kube-apiserver-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.109:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {1f62a432-33b9-495d-83b2-d1dbe6961325}] Until: 
Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 22:34:43.693406 6210 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"host
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:44Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.654647 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:44Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.674031 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:44Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.691979 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:44Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.711099 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:44Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.718296 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.718329 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.718338 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.718351 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.718360 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:44Z","lastTransitionTime":"2026-02-02T22:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.725722 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:44Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.822786 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.823066 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.823259 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.823497 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.823773 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:44Z","lastTransitionTime":"2026-02-02T22:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.927811 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.927872 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.927890 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.927915 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:44 crc kubenswrapper[4755]: I0202 22:34:44.927933 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:44Z","lastTransitionTime":"2026-02-02T22:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.013776 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 08:55:40.218321426 +0000 UTC Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.030969 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.031025 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.031045 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.031070 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.031087 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:45Z","lastTransitionTime":"2026-02-02T22:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.091033 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.110566 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.131192 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.133907 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.134094 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.134275 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.134420 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.134549 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:45Z","lastTransitionTime":"2026-02-02T22:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.167095 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33bcfe348b8d6076aef27610d63f553503678c9e
7da145cc9fc2d9f93956e294\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82aa37aadae597f97724a4f33d2a44cc546d1dd272db0e1652c32dea9a6dd16f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"message\\\":\\\"202 22:34:41.782604 6042 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0202 22:34:41.782647 6042 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 22:34:41.782648 6042 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0202 22:34:41.782672 6042 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 22:34:41.782677 6042 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0202 22:34:41.782680 6042 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 22:34:41.782695 6042 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 22:34:41.782715 6042 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 22:34:41.782763 6042 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 22:34:41.782782 6042 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 22:34:41.782788 6042 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 22:34:41.782790 6042 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 22:34:41.782809 6042 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 22:34:41.782816 6042 factory.go:656] Stopping watch factory\\\\nI0202 22:34:41.782820 6042 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 22:34:41.782828 6042 ovnkube.go:599] Stopped ovnkube\\\\nI02\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33bcfe348b8d6076aef27610d63f553503678c9e7da145cc9fc2d9f93956e294\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:34:44Z\\\",\\\"message\\\":\\\"ecycle-manager/olm-operator-metrics]} name:Service_openshift-operator-lifecycle-manager/olm-operator-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.168:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {63b1440a-0908-4cab-8799-012fa1cf0b07}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 22:34:43.693285 6210 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver-operator/metrics]} name:Service_openshift-kube-apiserver-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.109:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {1f62a432-33b9-495d-83b2-d1dbe6961325}] Until: 
Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 22:34:43.693406 6210 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"host
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.184294 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.201650 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.218611 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.234812 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.237020 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.237065 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.237081 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.237102 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.237118 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:45Z","lastTransitionTime":"2026-02-02T22:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.256444 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.274813 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.291982 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.311008 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.332576 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.1
26.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.340033 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.340072 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.340085 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.340103 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.340115 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:45Z","lastTransitionTime":"2026-02-02T22:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.346336 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e5c9d46-6238-45de-b6ed-f633e92b3728\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6ljzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.443776 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.443854 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.443874 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.443941 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.443965 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:45Z","lastTransitionTime":"2026-02-02T22:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.451419 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" event={"ID":"8e5c9d46-6238-45de-b6ed-f633e92b3728","Type":"ContainerStarted","Data":"4699e555a7f6d6a8245cd7efa09afc179d7a17903477c520a4aad929ab4bc218"} Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.454693 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4mblb_ae78d89e-7970-49df-8839-b1b6d7de4ec1/ovnkube-controller/1.log" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.466134 4755 scope.go:117] "RemoveContainer" containerID="33bcfe348b8d6076aef27610d63f553503678c9e7da145cc9fc2d9f93956e294" Feb 02 22:34:45 crc kubenswrapper[4755]: E0202 22:34:45.466656 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4mblb_openshift-ovn-kubernetes(ae78d89e-7970-49df-8839-b1b6d7de4ec1)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.478802 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 
crc kubenswrapper[4755]: I0202 22:34:45.498320 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.517676 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.536764 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.543757 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-k8tml"] Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.544520 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:34:45 crc kubenswrapper[4755]: E0202 22:34:45.544613 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.546761 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.546817 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.546843 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.546871 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.546891 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:45Z","lastTransitionTime":"2026-02-02T22:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.564053 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name
\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.588008 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\
\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-a
ccess-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\
\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.610513 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.610714 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c55pv\" (UniqueName: \"kubernetes.io/projected/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-kube-api-access-c55pv\") pod \"network-metrics-daemon-k8tml\" (UID: \"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\") " pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.610849 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs\") pod \"network-metrics-daemon-k8tml\" (UID: \"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\") " pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.630050 4755 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.648278 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.650517 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.650631 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.650704 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.650794 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.650823 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:45Z","lastTransitionTime":"2026-02-02T22:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.662314 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e5c9d46-6238-45de-b6ed-f633e92b3728\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7aa1ae9141eb375e7a9daa30bed33432bee7b5e963bee04bc20e8f8da94a31d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4699e555a7f6d6a8245cd7efa09afc179d7a17903477c520a4aad929ab4bc218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6ljzc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.681354 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.711681 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c55pv\" (UniqueName: \"kubernetes.io/projected/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-kube-api-access-c55pv\") pod \"network-metrics-daemon-k8tml\" (UID: \"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\") " pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.711788 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs\") pod \"network-metrics-daemon-k8tml\" (UID: \"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\") " pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:34:45 crc kubenswrapper[4755]: E0202 22:34:45.711941 4755 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 22:34:45 crc kubenswrapper[4755]: E0202 22:34:45.712002 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs podName:3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:46.211981861 +0000 UTC m=+41.903202197 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs") pod "network-metrics-daemon-k8tml" (UID: "3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.711862 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnl
y\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"contai
nerID\\\":\\\"cri-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33bcfe348b8d6076aef27610d63f553503678c9e7da145cc9fc2d9f93956e294\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82aa37aadae597f97724a4f33d2a44cc546d1dd272db0e1652c32dea9a6dd16f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:34:41Z\\\",\\\"message\\\":\\\"202 22:34:41.782604 6042 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0202 22:34:41.782647 6042 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 22:34:41.782648 6042 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0202 22:34:41.782672 6042 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 22:34:41.782677 6042 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0202 22:34:41.782680 6042 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 22:34:41.782695 6042 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 22:34:41.782715 6042 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 22:34:41.782763 6042 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 22:34:41.782782 6042 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 22:34:41.782788 6042 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 22:34:41.782790 6042 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 22:34:41.782809 6042 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 22:34:41.782816 6042 factory.go:656] Stopping watch factory\\\\nI0202 22:34:41.782820 6042 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 22:34:41.782828 6042 ovnkube.go:599] Stopped 
ovnkube\\\\nI02\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33bcfe348b8d6076aef27610d63f553503678c9e7da145cc9fc2d9f93956e294\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:34:44Z\\\",\\\"message\\\":\\\"ecycle-manager/olm-operator-metrics]} name:Service_openshift-operator-lifecycle-manager/olm-operator-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.168:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {63b1440a-0908-4cab-8799-012fa1cf0b07}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 22:34:43.693285 6210 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver-operator/metrics]} name:Service_openshift-kube-apiserver-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.109:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {1f62a432-33b9-495d-83b2-d1dbe6961325}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 22:34:43.693406 6210 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.732858 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.740611 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c55pv\" (UniqueName: \"kubernetes.io/projected/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-kube-api-access-c55pv\") pod \"network-metrics-daemon-k8tml\" (UID: \"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\") " pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.749950 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.758365 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.758416 4755 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.758432 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.758453 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.758467 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:45Z","lastTransitionTime":"2026-02-02T22:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.767709 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e5c9d46-6238-45de-b6ed-f633e92b3728\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7aa1ae9141eb375e7a9daa30bed33432bee7b5e963bee04bc20e8f8da94a31d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4699e555a7f6d6a8245cd7efa09afc179d7a17903477c520a4aad929ab4bc218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started
\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6ljzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.799010 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33bcfe348b8d6076aef27610d63f553503678c9e
7da145cc9fc2d9f93956e294\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33bcfe348b8d6076aef27610d63f553503678c9e7da145cc9fc2d9f93956e294\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:34:44Z\\\",\\\"message\\\":\\\"ecycle-manager/olm-operator-metrics]} name:Service_openshift-operator-lifecycle-manager/olm-operator-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.168:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {63b1440a-0908-4cab-8799-012fa1cf0b07}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 22:34:43.693285 6210 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver-operator/metrics]} name:Service_openshift-kube-apiserver-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.109:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {1f62a432-33b9-495d-83b2-d1dbe6961325}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 22:34:43.693406 6210 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4mblb_openshift-ovn-kubernetes(ae78d89e-7970-49df-8839-b1b6d7de4ec1)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.813424 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k8tml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k8tml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.827931 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.842462 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.861508 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.862284 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.862346 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.862370 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.862407 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.862430 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:45Z","lastTransitionTime":"2026-02-02T22:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.875856 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.892371 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.908145 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.922908 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.945904 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.965179 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.965226 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:45 crc 
kubenswrapper[4755]: I0202 22:34:45.965249 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.965281 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.965303 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:45Z","lastTransitionTime":"2026-02-02T22:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.966823 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:45 crc kubenswrapper[4755]: I0202 22:34:45.987092 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:45Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.006674 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:46Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.014109 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 09:39:15.56748707 +0000 UTC Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.026162 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\
"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:46Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.068144 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.068285 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.068399 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.068571 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.068602 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.068619 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.068643 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.068667 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:46Z","lastTransitionTime":"2026-02-02T22:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:46 crc kubenswrapper[4755]: E0202 22:34:46.068694 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:34:46 crc kubenswrapper[4755]: E0202 22:34:46.068586 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:34:46 crc kubenswrapper[4755]: E0202 22:34:46.069307 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.069713 4755 scope.go:117] "RemoveContainer" containerID="e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.177301 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.177350 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.177369 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.177593 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.178633 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:46Z","lastTransitionTime":"2026-02-02T22:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.216582 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs\") pod \"network-metrics-daemon-k8tml\" (UID: \"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\") " pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:34:46 crc kubenswrapper[4755]: E0202 22:34:46.216805 4755 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 22:34:46 crc kubenswrapper[4755]: E0202 22:34:46.216888 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs podName:3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:47.21686566 +0000 UTC m=+42.908086016 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs") pod "network-metrics-daemon-k8tml" (UID: "3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.281927 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.281975 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.281987 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.282006 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.282023 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:46Z","lastTransitionTime":"2026-02-02T22:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.385453 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.385495 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.385509 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.385525 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.385537 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:46Z","lastTransitionTime":"2026-02-02T22:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.469444 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.472594 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d"} Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.473369 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.488231 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e5c9d46-6238-45de-b6ed-f633e92b3728\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7aa1ae9141eb375e7a9daa30bed33432bee7b5e963bee04bc20e8f8da94a31d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4699e555a7f6d6a8245cd7efa09afc179d7a17903477c520a4aad929ab4bc218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6ljzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:46Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.489029 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.489084 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.489099 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.489122 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.489136 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:46Z","lastTransitionTime":"2026-02-02T22:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.506604 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:46Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.520843 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:46Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.548794 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:46Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.577002 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a
170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33bcfe348b8d6076aef27610d63f553503678c9e7da145cc9fc2d9f93956e294\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33bcfe348b8d6076aef27610d63f553503678c9e7da145cc9fc2d9f93956e294\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:34:44Z\\\",\\\"message\\\":\\\"ecycle-manager/olm-operator-metrics]} name:Service_openshift-operator-lifecycle-manager/olm-operator-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.168:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {63b1440a-0908-4cab-8799-012fa1cf0b07}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 22:34:43.693285 6210 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver-operator/metrics]} name:Service_openshift-kube-apiserver-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.109:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {1f62a432-33b9-495d-83b2-d1dbe6961325}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 22:34:43.693406 6210 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4mblb_openshift-ovn-kubernetes(ae78d89e-7970-49df-8839-b1b6d7de4ec1)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:46Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.589610 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k8tml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k8tml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:46Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.591630 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.591710 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.591773 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.591807 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.591831 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:46Z","lastTransitionTime":"2026-02-02T22:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.603573 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:46Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.621754 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:46Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.638687 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:46Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.653514 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:46Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.674378 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:46Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.695158 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.695207 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.695226 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.695249 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.695265 4755 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:46Z","lastTransitionTime":"2026-02-02T22:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.697689 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:46Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.714840 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:46Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.732379 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:46Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.758341 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.1
26.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:46Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.797689 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.797761 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.797783 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.797804 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.797821 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:46Z","lastTransitionTime":"2026-02-02T22:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.900526 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.900567 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.900578 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.900593 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:46 crc kubenswrapper[4755]: I0202 22:34:46.900603 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:46Z","lastTransitionTime":"2026-02-02T22:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.003480 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.003539 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.003565 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.003595 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.003616 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:47Z","lastTransitionTime":"2026-02-02T22:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.015244 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 09:01:35.038319615 +0000 UTC Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.067896 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:34:47 crc kubenswrapper[4755]: E0202 22:34:47.068131 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.105847 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.105908 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.105926 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.105962 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.105978 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:47Z","lastTransitionTime":"2026-02-02T22:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.209175 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.209240 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.209252 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.209269 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.209280 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:47Z","lastTransitionTime":"2026-02-02T22:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.229994 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs\") pod \"network-metrics-daemon-k8tml\" (UID: \"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\") " pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:34:47 crc kubenswrapper[4755]: E0202 22:34:47.230169 4755 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 22:34:47 crc kubenswrapper[4755]: E0202 22:34:47.230249 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs podName:3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:49.230227338 +0000 UTC m=+44.921447704 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs") pod "network-metrics-daemon-k8tml" (UID: "3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.311906 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.311960 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.311978 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.312003 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.312021 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:47Z","lastTransitionTime":"2026-02-02T22:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.413938 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.414024 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.414043 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.414071 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.414088 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:47Z","lastTransitionTime":"2026-02-02T22:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.516587 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.516633 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.516645 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.516664 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.516676 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:47Z","lastTransitionTime":"2026-02-02T22:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.619930 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.619998 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.620016 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.620043 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.620059 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:47Z","lastTransitionTime":"2026-02-02T22:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.723153 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.723206 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.723227 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.723250 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.723267 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:47Z","lastTransitionTime":"2026-02-02T22:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.826451 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.826842 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.826989 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.827169 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.827307 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:47Z","lastTransitionTime":"2026-02-02T22:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.930952 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.931009 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.931028 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.931052 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:47 crc kubenswrapper[4755]: I0202 22:34:47.931069 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:47Z","lastTransitionTime":"2026-02-02T22:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.016024 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 17:11:07.814357232 +0000 UTC Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.033964 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.034033 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.034055 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.034082 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.034102 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:48Z","lastTransitionTime":"2026-02-02T22:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.068546 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.068574 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:34:48 crc kubenswrapper[4755]: E0202 22:34:48.068716 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:34:48 crc kubenswrapper[4755]: E0202 22:34:48.068862 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.068575 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:48 crc kubenswrapper[4755]: E0202 22:34:48.069514 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.137836 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.138195 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.138479 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.138632 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.138832 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:48Z","lastTransitionTime":"2026-02-02T22:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.241561 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.241944 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.242160 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.242333 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.242486 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:48Z","lastTransitionTime":"2026-02-02T22:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.345971 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.346061 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.346081 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.346105 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.346124 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:48Z","lastTransitionTime":"2026-02-02T22:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.448470 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.448541 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.448565 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.448591 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.448612 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:48Z","lastTransitionTime":"2026-02-02T22:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.551033 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.551092 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.551113 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.551137 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.551158 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:48Z","lastTransitionTime":"2026-02-02T22:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.654110 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.654168 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.654187 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.654210 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.654227 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:48Z","lastTransitionTime":"2026-02-02T22:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.757926 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.758053 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.758076 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.758103 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.758122 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:48Z","lastTransitionTime":"2026-02-02T22:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.861270 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.861332 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.861346 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.861364 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.861376 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:48Z","lastTransitionTime":"2026-02-02T22:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.964189 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.964250 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.964268 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.964292 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:48 crc kubenswrapper[4755]: I0202 22:34:48.964316 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:48Z","lastTransitionTime":"2026-02-02T22:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.016520 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 01:30:31.126049562 +0000 UTC Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.067217 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.067277 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.067294 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.067317 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.067334 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:49Z","lastTransitionTime":"2026-02-02T22:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.068354 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:34:49 crc kubenswrapper[4755]: E0202 22:34:49.068589 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.170527 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.170591 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.170609 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.170635 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.170654 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:49Z","lastTransitionTime":"2026-02-02T22:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.251647 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs\") pod \"network-metrics-daemon-k8tml\" (UID: \"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\") " pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:34:49 crc kubenswrapper[4755]: E0202 22:34:49.251904 4755 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 22:34:49 crc kubenswrapper[4755]: E0202 22:34:49.252007 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs podName:3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923 nodeName:}" failed. No retries permitted until 2026-02-02 22:34:53.251980064 +0000 UTC m=+48.943200420 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs") pod "network-metrics-daemon-k8tml" (UID: "3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.273398 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.273463 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.273480 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.273504 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.273526 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:49Z","lastTransitionTime":"2026-02-02T22:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.376671 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.376759 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.376784 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.376817 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.376840 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:49Z","lastTransitionTime":"2026-02-02T22:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.480059 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.480129 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.480153 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.480183 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.480204 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:49Z","lastTransitionTime":"2026-02-02T22:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.583938 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.584051 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.584116 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.584144 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.584177 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:49Z","lastTransitionTime":"2026-02-02T22:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.687260 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.687363 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.687389 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.687436 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.687463 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:49Z","lastTransitionTime":"2026-02-02T22:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.790926 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.790995 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.791016 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.791043 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.791062 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:49Z","lastTransitionTime":"2026-02-02T22:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.893945 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.893995 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.894013 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.894036 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.894055 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:49Z","lastTransitionTime":"2026-02-02T22:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.996833 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.996905 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.996924 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.996952 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:49 crc kubenswrapper[4755]: I0202 22:34:49.996970 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:49Z","lastTransitionTime":"2026-02-02T22:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.016998 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 07:26:31.522097041 +0000 UTC Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.068330 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.068426 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.068480 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:34:50 crc kubenswrapper[4755]: E0202 22:34:50.068685 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:34:50 crc kubenswrapper[4755]: E0202 22:34:50.068857 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:34:50 crc kubenswrapper[4755]: E0202 22:34:50.068951 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.099909 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.099984 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.100009 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.100040 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.100066 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:50Z","lastTransitionTime":"2026-02-02T22:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.202898 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.202975 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.202992 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.203018 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.203038 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:50Z","lastTransitionTime":"2026-02-02T22:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.305760 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.305889 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.305912 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.305935 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.305952 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:50Z","lastTransitionTime":"2026-02-02T22:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.409243 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.409292 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.409309 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.409332 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.409351 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:50Z","lastTransitionTime":"2026-02-02T22:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.512149 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.512213 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.512230 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.512252 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.512269 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:50Z","lastTransitionTime":"2026-02-02T22:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.615440 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.615515 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.615535 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.615560 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.615578 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:50Z","lastTransitionTime":"2026-02-02T22:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.718588 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.718652 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.718709 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.718764 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.718785 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:50Z","lastTransitionTime":"2026-02-02T22:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.821685 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.821776 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.821795 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.821819 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.821836 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:50Z","lastTransitionTime":"2026-02-02T22:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.925471 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.925544 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.925568 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.925601 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:50 crc kubenswrapper[4755]: I0202 22:34:50.925626 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:50Z","lastTransitionTime":"2026-02-02T22:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.018087 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 04:43:20.065476754 +0000 UTC Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.031713 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.031871 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.031892 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.031925 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.031945 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:51Z","lastTransitionTime":"2026-02-02T22:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.068966 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:34:51 crc kubenswrapper[4755]: E0202 22:34:51.069247 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.135882 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.135952 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.135973 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.136000 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.136019 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:51Z","lastTransitionTime":"2026-02-02T22:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.144849 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.144919 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.144943 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.144972 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.144995 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:51Z","lastTransitionTime":"2026-02-02T22:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:51 crc kubenswrapper[4755]: E0202 22:34:51.164797 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:51Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.169616 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.169670 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.169691 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.169716 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.169762 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:51Z","lastTransitionTime":"2026-02-02T22:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:51 crc kubenswrapper[4755]: E0202 22:34:51.188608 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:51Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.193107 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.193157 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.193175 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.193198 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.193215 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:51Z","lastTransitionTime":"2026-02-02T22:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:51 crc kubenswrapper[4755]: E0202 22:34:51.212636 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.227524 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.227615 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
event="NodeHasNoDiskPressure" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.227635 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.227665 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.227684 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:51Z","lastTransitionTime":"2026-02-02T22:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:51 crc kubenswrapper[4755]: E0202 22:34:51.254545 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.259885 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.259957 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.259978 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.260004 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.260022 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:51Z","lastTransitionTime":"2026-02-02T22:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:51 crc kubenswrapper[4755]: E0202 22:34:51.280523 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [same status patch payload as in the previous attempt] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:51Z is after 2025-08-24T17:21:41Z"
Feb 02 22:34:51 crc kubenswrapper[4755]: E0202 22:34:51.280777 4755 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.283632 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
event="NodeHasSufficientMemory" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.283681 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.283699 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.283722 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.283764 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:51Z","lastTransitionTime":"2026-02-02T22:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.386851 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.386892 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.386901 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.386916 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.386925 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:51Z","lastTransitionTime":"2026-02-02T22:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.489367 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.489436 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.489456 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.489482 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.489501 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:51Z","lastTransitionTime":"2026-02-02T22:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.592420 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.592654 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.592724 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.592809 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.592870 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:51Z","lastTransitionTime":"2026-02-02T22:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.696749 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.697029 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.697180 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.697325 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.697454 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:51Z","lastTransitionTime":"2026-02-02T22:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.800383 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.800433 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.800451 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.800472 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.800488 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:51Z","lastTransitionTime":"2026-02-02T22:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.903874 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.903945 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.903967 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.903997 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:51 crc kubenswrapper[4755]: I0202 22:34:51.904037 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:51Z","lastTransitionTime":"2026-02-02T22:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.007756 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.007814 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.007833 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.007859 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.007878 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:52Z","lastTransitionTime":"2026-02-02T22:34:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.018225 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 12:10:57.407824162 +0000 UTC Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.067869 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.067906 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:34:52 crc kubenswrapper[4755]: E0202 22:34:52.068093 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.068147 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:34:52 crc kubenswrapper[4755]: E0202 22:34:52.068315 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:34:52 crc kubenswrapper[4755]: E0202 22:34:52.068406 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.111073 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.111151 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.111175 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.111210 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.111235 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:52Z","lastTransitionTime":"2026-02-02T22:34:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.213907 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.214026 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.214045 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.214067 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.214083 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:52Z","lastTransitionTime":"2026-02-02T22:34:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.317256 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.317304 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.317321 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.317343 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.317362 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:52Z","lastTransitionTime":"2026-02-02T22:34:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.420627 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.420690 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.420714 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.420775 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.420805 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:52Z","lastTransitionTime":"2026-02-02T22:34:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.523905 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.524075 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.524109 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.524200 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.524230 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:52Z","lastTransitionTime":"2026-02-02T22:34:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.628166 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.628278 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.628311 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.628342 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.628366 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:52Z","lastTransitionTime":"2026-02-02T22:34:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.732115 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.732188 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.732211 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.732242 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.732264 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:52Z","lastTransitionTime":"2026-02-02T22:34:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.835596 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.836073 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.836273 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.836469 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.836607 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:52Z","lastTransitionTime":"2026-02-02T22:34:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.939848 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.940201 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.940396 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.940985 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:52 crc kubenswrapper[4755]: I0202 22:34:52.941161 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:52Z","lastTransitionTime":"2026-02-02T22:34:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.019196 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 18:35:26.638773554 +0000 UTC Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.044341 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.044407 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.044425 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.044450 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.044467 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:53Z","lastTransitionTime":"2026-02-02T22:34:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.068606 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:34:53 crc kubenswrapper[4755]: E0202 22:34:53.068812 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.147191 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.147251 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.147269 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.147292 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.147310 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:53Z","lastTransitionTime":"2026-02-02T22:34:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.255238 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.255328 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.255355 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.255603 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.255650 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:53Z","lastTransitionTime":"2026-02-02T22:34:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.302546 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs\") pod \"network-metrics-daemon-k8tml\" (UID: \"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\") " pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:34:53 crc kubenswrapper[4755]: E0202 22:34:53.302759 4755 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 22:34:53 crc kubenswrapper[4755]: E0202 22:34:53.302878 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs podName:3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923 nodeName:}" failed. No retries permitted until 2026-02-02 22:35:01.302846845 +0000 UTC m=+56.994067211 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs") pod "network-metrics-daemon-k8tml" (UID: "3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.359106 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.359164 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.359182 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.359206 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.359222 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:53Z","lastTransitionTime":"2026-02-02T22:34:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.463105 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.463168 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.463185 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.463209 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.463226 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:53Z","lastTransitionTime":"2026-02-02T22:34:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.565705 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.565811 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.565830 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.565854 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.565871 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:53Z","lastTransitionTime":"2026-02-02T22:34:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.668695 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.668798 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.668820 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.668849 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.668881 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:53Z","lastTransitionTime":"2026-02-02T22:34:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.772101 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.772163 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.772173 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.772187 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.772214 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:53Z","lastTransitionTime":"2026-02-02T22:34:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.859702 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.869775 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.875105 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.875164 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.875182 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.875206 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.875223 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:53Z","lastTransitionTime":"2026-02-02T22:34:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.880213 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:53Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.899867 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:53Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.917495 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:53Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.938407 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:53Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.961383 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256
:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:53Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.978770 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.978848 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.978892 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.978924 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.978943 4755 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:53Z","lastTransitionTime":"2026-02-02T22:34:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.980569 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:53Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:53 crc kubenswrapper[4755]: I0202 22:34:53.998602 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:53Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.017451 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:54Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.020715 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 11:43:25.955041561 +0000 UTC Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.040485 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"
state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\
\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:54Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.057514 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e5c9d46-6238-45de-b6ed-f633e92b3728\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7aa1ae9141eb375e7a9daa30bed33432bee7b5e963bee04bc20e8f8da94a31d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4699e555a7f6d6a8245cd7efa09afc179d7a17903477c520a4aad929ab4bc218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-con
fig/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6ljzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:54Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.068519 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.068534 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.068601 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:54 crc kubenswrapper[4755]: E0202 22:34:54.068801 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:34:54 crc kubenswrapper[4755]: E0202 22:34:54.068920 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:34:54 crc kubenswrapper[4755]: E0202 22:34:54.069627 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.073236 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k8tml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k8tml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:54Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.082064 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.082131 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.082149 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.082175 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.082194 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:54Z","lastTransitionTime":"2026-02-02T22:34:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.095659 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:54Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.128418 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:54Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.161137 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:54Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.180009 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a
170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33bcfe348b8d6076aef27610d63f553503678c9e7da145cc9fc2d9f93956e294\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33bcfe348b8d6076aef27610d63f553503678c9e7da145cc9fc2d9f93956e294\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:34:44Z\\\",\\\"message\\\":\\\"ecycle-manager/olm-operator-metrics]} name:Service_openshift-operator-lifecycle-manager/olm-operator-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.168:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {63b1440a-0908-4cab-8799-012fa1cf0b07}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 22:34:43.693285 6210 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver-operator/metrics]} name:Service_openshift-kube-apiserver-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.109:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {1f62a432-33b9-495d-83b2-d1dbe6961325}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 22:34:43.693406 6210 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4mblb_openshift-ovn-kubernetes(ae78d89e-7970-49df-8839-b1b6d7de4ec1)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:54Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.184842 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.184880 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.184890 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.184905 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.184914 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:54Z","lastTransitionTime":"2026-02-02T22:34:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.287900 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.288012 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.288039 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.288066 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.288084 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:54Z","lastTransitionTime":"2026-02-02T22:34:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.390549 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.390620 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.390641 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.390667 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.390685 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:54Z","lastTransitionTime":"2026-02-02T22:34:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.493442 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.493849 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.494023 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.494172 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.494309 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:54Z","lastTransitionTime":"2026-02-02T22:34:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.597790 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.597860 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.597883 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.597912 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.597939 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:54Z","lastTransitionTime":"2026-02-02T22:34:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.701130 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.701202 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.701222 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.701250 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.701268 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:54Z","lastTransitionTime":"2026-02-02T22:34:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.803759 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.803831 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.803855 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.803885 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.803917 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:54Z","lastTransitionTime":"2026-02-02T22:34:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.907268 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.907341 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.907361 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.907384 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:54 crc kubenswrapper[4755]: I0202 22:34:54.907400 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:54Z","lastTransitionTime":"2026-02-02T22:34:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.010071 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.010147 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.010169 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.010197 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.010217 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:55Z","lastTransitionTime":"2026-02-02T22:34:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.021376 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 19:26:05.907323797 +0000 UTC Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.068598 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:34:55 crc kubenswrapper[4755]: E0202 22:34:55.068839 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.088253 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e5c9d46-6238-45de-b6ed-f633e92b3728\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7aa1ae9141eb375e7a9daa30bed33432bee7b5e963bee04bc20e8f8da94a31d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4699e555a7f6d6a8245cd7efa09afc179d7a17903477c520a4aad929ab4bc218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6ljzc\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:55Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.110794 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:55Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.113406 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.113477 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.113501 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.113532 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.113554 4755 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:55Z","lastTransitionTime":"2026-02-02T22:34:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.131357 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:55Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.155645 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:55Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.187480 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a
170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33bcfe348b8d6076aef27610d63f553503678c9e7da145cc9fc2d9f93956e294\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33bcfe348b8d6076aef27610d63f553503678c9e7da145cc9fc2d9f93956e294\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:34:44Z\\\",\\\"message\\\":\\\"ecycle-manager/olm-operator-metrics]} name:Service_openshift-operator-lifecycle-manager/olm-operator-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.168:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {63b1440a-0908-4cab-8799-012fa1cf0b07}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 22:34:43.693285 6210 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver-operator/metrics]} name:Service_openshift-kube-apiserver-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.109:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {1f62a432-33b9-495d-83b2-d1dbe6961325}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 22:34:43.693406 6210 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4mblb_openshift-ovn-kubernetes(ae78d89e-7970-49df-8839-b1b6d7de4ec1)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:55Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.204777 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k8tml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k8tml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:55Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.216226 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.216288 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.216306 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.216331 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.216351 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:55Z","lastTransitionTime":"2026-02-02T22:34:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.223793 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5dfdc9-93c3-442e-b9ac-1edbde8b5356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1537c30f4102532e8b02d31e92eb43916f7810a8b082d2b564c45d2c269fe8c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e11caaf9b5dd96ed1cf9718112971ce1e059fbde5ce0bd7e43c326515db990d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88e31bc33ec540bf5097cb85062e7681f619e80680c68169fd7a10c5086c3f13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:55Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.240593 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-
api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:55Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.257938 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:55Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.278568 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:55Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.297408 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:55Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.316845 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:55Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.318340 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.318430 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.318456 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.318489 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.318510 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:55Z","lastTransitionTime":"2026-02-02T22:34:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.336480 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:55Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.357526 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:55Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.379236 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:55Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.401393 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:55Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.422152 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.422220 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.422272 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.422297 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.422315 4755 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:55Z","lastTransitionTime":"2026-02-02T22:34:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.525542 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.525611 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.525630 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.525656 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.525674 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:55Z","lastTransitionTime":"2026-02-02T22:34:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.628587 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.628646 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.628662 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.628686 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.628703 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:55Z","lastTransitionTime":"2026-02-02T22:34:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.731953 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.731999 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.732018 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.732041 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.732058 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:55Z","lastTransitionTime":"2026-02-02T22:34:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.834772 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.834840 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.834859 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.834884 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.834906 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:55Z","lastTransitionTime":"2026-02-02T22:34:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.943613 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.943674 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.943703 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.943759 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:55 crc kubenswrapper[4755]: I0202 22:34:55.945600 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:55Z","lastTransitionTime":"2026-02-02T22:34:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.022251 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 19:40:17.6382542 +0000 UTC
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.049684 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.049764 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.049784 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.049808 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.049827 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:56Z","lastTransitionTime":"2026-02-02T22:34:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.068423 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.068517 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 22:34:56 crc kubenswrapper[4755]: E0202 22:34:56.068626 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.068446 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 22:34:56 crc kubenswrapper[4755]: E0202 22:34:56.069247 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 22:34:56 crc kubenswrapper[4755]: E0202 22:34:56.069387 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.152890 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.152967 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.153029 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.153060 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.153079 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:56Z","lastTransitionTime":"2026-02-02T22:34:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.255933 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.256001 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.256021 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.256047 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.256064 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:56Z","lastTransitionTime":"2026-02-02T22:34:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.362295 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.362354 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.362371 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.362396 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.362413 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:56Z","lastTransitionTime":"2026-02-02T22:34:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.464796 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.464824 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.464833 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.464845 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.464856 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:56Z","lastTransitionTime":"2026-02-02T22:34:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.567913 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.567953 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.567966 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.567982 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.567993 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:56Z","lastTransitionTime":"2026-02-02T22:34:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.671475 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.671525 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.671537 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.671553 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.671567 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:56Z","lastTransitionTime":"2026-02-02T22:34:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.774587 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.774626 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.774659 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.774679 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.774690 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:56Z","lastTransitionTime":"2026-02-02T22:34:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.877496 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.877543 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.877552 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.877570 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.877581 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:56Z","lastTransitionTime":"2026-02-02T22:34:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.980841 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.981177 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.981396 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.981547 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:56 crc kubenswrapper[4755]: I0202 22:34:56.981701 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:56Z","lastTransitionTime":"2026-02-02T22:34:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.023388 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 00:21:44.221571026 +0000 UTC
Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.068500 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml"
Feb 02 22:34:57 crc kubenswrapper[4755]: E0202 22:34:57.068978 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923"
Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.084169 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.084221 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.084241 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.084266 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.084282 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:57Z","lastTransitionTime":"2026-02-02T22:34:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.187310 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.187559 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.187646 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.187762 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.187863 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:57Z","lastTransitionTime":"2026-02-02T22:34:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.290753 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.290820 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.290844 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.290875 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.290899 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:57Z","lastTransitionTime":"2026-02-02T22:34:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.377465 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.393983 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.394326 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.394548 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.394835 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.395243 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:57Z","lastTransitionTime":"2026-02-02T22:34:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.396508 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e5c9d46-6238-45de-b6ed-f633e92b3728\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7aa1ae9141eb375e7a9daa30bed33432bee7b5e963bee04bc20e8f8da94a31d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]},{\\\"containerID\\\":\\\"cri-o://4699e555a7f6d6a8245cd7efa09afc179d7a17903477c520a4aad929ab4bc218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6ljzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:57Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.413384 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5dfdc9-93c3-442e-b9ac-1edbde8b5356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1537c30f4102532e8b02d31e92eb43916f7810a8b082d2b564c45d2c269fe8c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e11caaf9b5dd96ed1cf9718112971ce1e059fbde5ce0bd7e43c326515db990d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88e31bc33ec540bf5097cb85062e7681f619e80680c68169fd7a10c5086c3f13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:57Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.431976 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:57Z is after 
2025-08-24T17:21:41Z" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.448182 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:57Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.473938 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:57Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.498897 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.499214 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.499460 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.499657 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.499822 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:57Z","lastTransitionTime":"2026-02-02T22:34:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.505492 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33bcfe348b8d6076aef27610d63f553503678c9e
7da145cc9fc2d9f93956e294\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33bcfe348b8d6076aef27610d63f553503678c9e7da145cc9fc2d9f93956e294\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:34:44Z\\\",\\\"message\\\":\\\"ecycle-manager/olm-operator-metrics]} name:Service_openshift-operator-lifecycle-manager/olm-operator-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.168:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {63b1440a-0908-4cab-8799-012fa1cf0b07}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 22:34:43.693285 6210 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver-operator/metrics]} name:Service_openshift-kube-apiserver-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.109:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {1f62a432-33b9-495d-83b2-d1dbe6961325}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 22:34:43.693406 6210 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4mblb_openshift-ovn-kubernetes(ae78d89e-7970-49df-8839-b1b6d7de4ec1)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:57Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.523330 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k8tml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k8tml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:57Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.539825 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:57Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.559494 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:57Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.576561 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:57Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.592903 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:57Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.603137 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.603196 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.603216 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.603242 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.603262 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:57Z","lastTransitionTime":"2026-02-02T22:34:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.615958 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f
c4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 
22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:57Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.636979 4755 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:57Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.659617 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:57Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.679593 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:57Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.691936 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/opens
hift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true
,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:34:57Z is after 2025-08-24T17:21:41Z" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.705534 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.705609 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.705642 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.705671 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.705692 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:57Z","lastTransitionTime":"2026-02-02T22:34:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.809095 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.809176 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.809198 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.809229 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.809253 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:57Z","lastTransitionTime":"2026-02-02T22:34:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.912813 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.913117 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.913300 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.913479 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.913615 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:57Z","lastTransitionTime":"2026-02-02T22:34:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.957539 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.957680 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.957715 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.957785 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:34:57 crc kubenswrapper[4755]: I0202 22:34:57.957811 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:57 crc kubenswrapper[4755]: E0202 22:34:57.957945 4755 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object 
"openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 22:34:57 crc kubenswrapper[4755]: E0202 22:34:57.957996 4755 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 22:34:57 crc kubenswrapper[4755]: E0202 22:34:57.958012 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 22:35:29.957994557 +0000 UTC m=+85.649214893 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 22:34:57 crc kubenswrapper[4755]: E0202 22:34:57.958006 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 22:34:57 crc kubenswrapper[4755]: E0202 22:34:57.958143 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 22:34:57 crc kubenswrapper[4755]: E0202 22:34:57.958158 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 22:35:29.958120451 +0000 UTC m=+85.649340817 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 22:34:57 crc kubenswrapper[4755]: E0202 22:34:57.958182 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 22:34:57 crc kubenswrapper[4755]: E0202 22:34:57.958190 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 22:34:57 crc kubenswrapper[4755]: E0202 22:34:57.958211 4755 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 22:34:57 crc kubenswrapper[4755]: E0202 22:34:57.958224 4755 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 22:34:57 crc kubenswrapper[4755]: E0202 22:34:57.958290 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 22:35:29.958271765 +0000 UTC m=+85.649492121 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 22:34:57 crc kubenswrapper[4755]: E0202 22:34:57.958321 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 22:35:29.958309536 +0000 UTC m=+85.649529912 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 22:34:57 crc kubenswrapper[4755]: E0202 22:34:57.958353 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:35:29.958339357 +0000 UTC m=+85.649559693 (durationBeforeRetry 32s). 
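
[annotation] The projected.go errors here fail while assembling the kube-api-access-* volumes: each is built from a service account token plus the namespace's kube-root-ca.crt (and, on OpenShift, openshift-service-ca.crt) configmaps, and SetUp is parked until every referenced object is registered in the kubelet's cache. A sketch of that volume's shape using k8s.io/api (assumed available in go.mod); the token lifetime and configmap key names are typical values, not taken from this log:

    package main

    import (
        "fmt"

        v1 "k8s.io/api/core/v1"
    )

    func main() {
        expiration := int64(3607) // typical bound token lifetime; value assumed
        // Sources a kube-api-access-* projected volume draws on. SetUp cannot
        // proceed until each referenced object exists in the kubelet's cache,
        // which is exactly what the "not registered" errors above report.
        vol := v1.Volume{
            Name: "kube-api-access-cqllr",
            VolumeSource: v1.VolumeSource{
                Projected: &v1.ProjectedVolumeSource{
                    Sources: []v1.VolumeProjection{
                        {ServiceAccountToken: &v1.ServiceAccountTokenProjection{
                            Path:              "token",
                            ExpirationSeconds: &expiration,
                        }},
                        {ConfigMap: &v1.ConfigMapProjection{
                            LocalObjectReference: v1.LocalObjectReference{Name: "kube-root-ca.crt"},
                            Items:                []v1.KeyToPath{{Key: "ca.crt", Path: "ca.crt"}},
                        }},
                        {ConfigMap: &v1.ConfigMapProjection{
                            LocalObjectReference: v1.LocalObjectReference{Name: "openshift-service-ca.crt"},
                            Items:                []v1.KeyToPath{{Key: "service-ca.crt", Path: "service-ca.crt"}},
                        }},
                    },
                },
            },
        }
        fmt.Printf("%+v\n", vol)
    }
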
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.017045 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.017110 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.017128 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.017153 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.017172 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:58Z","lastTransitionTime":"2026-02-02T22:34:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.024176 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 22:12:12.805974128 +0000 UTC Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.067802 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.067827 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.067844 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:34:58 crc kubenswrapper[4755]: E0202 22:34:58.067992 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:34:58 crc kubenswrapper[4755]: E0202 22:34:58.068188 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
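
[annotation] The recurring KubeletNotReady condition and the "Error syncing pod" entries all trace back to one gate: no CNI configuration file exists yet in /etc/kubernetes/cni/net.d/, so the runtime reports NetworkReady=false and sandbox creation is refused. A minimal sketch of that readiness check; the accepted extensions mirror common CNI config loaders and are an assumption, not kubelet's exact code:

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    // Minimal version of the gate failing above: the network plugin counts as
    // ready once a CNI config file appears in the conf directory.
    func cniReady(confDir string) (bool, error) {
        entries, err := os.ReadDir(confDir)
        if err != nil {
            return false, err
        }
        for _, e := range entries {
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                return true, nil
            }
        }
        return false, nil
    }

    func main() {
        ok, err := cniReady("/etc/kubernetes/cni/net.d")
        fmt.Println("NetworkReady:", ok, "err:", err)
    }

Once ovnkube-node (seen restarting near the end of this section) writes its conflist into that directory, the check flips and the NodeNotReady loop stops.
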
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:34:58 crc kubenswrapper[4755]: E0202 22:34:58.068334 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.120643 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.120717 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.120775 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.120808 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.120833 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:58Z","lastTransitionTime":"2026-02-02T22:34:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.224212 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.224277 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.224301 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.224329 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.224352 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:58Z","lastTransitionTime":"2026-02-02T22:34:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.327706 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.327810 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.327835 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.327869 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.327892 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:58Z","lastTransitionTime":"2026-02-02T22:34:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.431639 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.431698 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.431716 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.431768 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.431784 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:58Z","lastTransitionTime":"2026-02-02T22:34:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.534642 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.534694 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.534712 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.534790 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.534808 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:58Z","lastTransitionTime":"2026-02-02T22:34:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.638039 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.638340 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.638890 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.639063 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.639202 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:58Z","lastTransitionTime":"2026-02-02T22:34:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.742965 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.743304 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.743454 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.743617 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.743775 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:58Z","lastTransitionTime":"2026-02-02T22:34:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.846985 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.847318 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.847597 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.847720 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.847900 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:58Z","lastTransitionTime":"2026-02-02T22:34:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.951016 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.951072 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.951093 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.951119 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:58 crc kubenswrapper[4755]: I0202 22:34:58.951139 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:58Z","lastTransitionTime":"2026-02-02T22:34:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.025127 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 23:56:31.402509576 +0000 UTC Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.053881 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.053942 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.053966 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.053995 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.054018 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:59Z","lastTransitionTime":"2026-02-02T22:34:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.070813 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:34:59 crc kubenswrapper[4755]: E0202 22:34:59.071020 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.157423 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.157577 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.157611 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.157634 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.157651 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:59Z","lastTransitionTime":"2026-02-02T22:34:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.260816 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.260885 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.260908 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.260936 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.260956 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:59Z","lastTransitionTime":"2026-02-02T22:34:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.363808 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.363862 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.363879 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.363904 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.363921 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:59Z","lastTransitionTime":"2026-02-02T22:34:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.467202 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.467267 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.467291 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.467322 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.467344 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:59Z","lastTransitionTime":"2026-02-02T22:34:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.570853 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.571162 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.571336 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.571493 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.571633 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:59Z","lastTransitionTime":"2026-02-02T22:34:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.675478 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.675534 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.675552 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.675577 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.675595 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:59Z","lastTransitionTime":"2026-02-02T22:34:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.779694 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.780159 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.780305 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.780455 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.780649 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:59Z","lastTransitionTime":"2026-02-02T22:34:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.883900 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.884244 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.884411 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.884561 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.884686 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:59Z","lastTransitionTime":"2026-02-02T22:34:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.988103 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.988445 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.988611 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.988849 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:34:59 crc kubenswrapper[4755]: I0202 22:34:59.989036 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:34:59Z","lastTransitionTime":"2026-02-02T22:34:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.025968 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 18:14:42.667194558 +0000 UTC Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.068329 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:35:00 crc kubenswrapper[4755]: E0202 22:35:00.068509 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.068541 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.069164 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:35:00 crc kubenswrapper[4755]: E0202 22:35:00.069298 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:35:00 crc kubenswrapper[4755]: E0202 22:35:00.069402 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.070099 4755 scope.go:117] "RemoveContainer" containerID="33bcfe348b8d6076aef27610d63f553503678c9e7da145cc9fc2d9f93956e294" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.092179 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.092235 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.092252 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.092275 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.092293 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:00Z","lastTransitionTime":"2026-02-02T22:35:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.196347 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.196414 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.196437 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.196466 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.196487 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:00Z","lastTransitionTime":"2026-02-02T22:35:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.300168 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.300217 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.300234 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.300255 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.300272 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:00Z","lastTransitionTime":"2026-02-02T22:35:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.404160 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.404216 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.404233 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.404256 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.404272 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:00Z","lastTransitionTime":"2026-02-02T22:35:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.507854 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.507941 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.507970 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.508003 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.508028 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:00Z","lastTransitionTime":"2026-02-02T22:35:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.523234 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4mblb_ae78d89e-7970-49df-8839-b1b6d7de4ec1/ovnkube-controller/1.log" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.529275 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerStarted","Data":"f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b"} Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.529920 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.554068 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:00Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.576794 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:00Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.596966 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:00Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.610282 4755 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.610320 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.610335 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.610354 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.610367 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:00Z","lastTransitionTime":"2026-02-02T22:35:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.612065 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:00Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.686962 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:00Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.707049 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:00Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.712362 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.712395 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.712407 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.712423 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.712435 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:00Z","lastTransitionTime":"2026-02-02T22:35:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.720651 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:00Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.734037 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:00Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.748923 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:00Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.760044 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e5c9d46-6238-45de-b6ed-f633e92b3728\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7aa1ae9141eb375e7a9daa30bed33432bee7b5e963bee04bc20e8f8da94a31d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4699e555a7f6d6a8245cd7efa09afc179d7a17903477c520a4aad929ab4bc218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6ljzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:00Z is after 2025-08-24T17:21:41Z" Feb 02 
22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.770618 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5dfdc9-93c3-442e-b9ac-1edbde8b5356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1537c30f4102532e8b02d31e92eb43916f7810a8b082d2b564c45d2c269fe8c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e11caaf9b5dd96ed1cf9718112971ce1e059fbde5ce0bd7e43c326515db990d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88e31bc33ec540bf5097cb85062e7681f619e80680c68169fd7a10c5086c3f13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:00Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.783119 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:00Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.792446 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:00Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.805544 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:00Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.815303 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.815375 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.815395 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.815422 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.815440 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:00Z","lastTransitionTime":"2026-02-02T22:35:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.829754 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f328e32897ff26f0f2d962c617ef49fe3c2462c9
f1aea3d02508aaabbcb1a75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33bcfe348b8d6076aef27610d63f553503678c9e7da145cc9fc2d9f93956e294\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:34:44Z\\\",\\\"message\\\":\\\"ecycle-manager/olm-operator-metrics]} name:Service_openshift-operator-lifecycle-manager/olm-operator-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.168:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {63b1440a-0908-4cab-8799-012fa1cf0b07}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 22:34:43.693285 6210 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver-operator/metrics]} name:Service_openshift-kube-apiserver-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.109:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {1f62a432-33b9-495d-83b2-d1dbe6961325}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 22:34:43.693406 6210 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:35:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:00Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.838778 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k8tml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k8tml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:00Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.917442 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.917501 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.917520 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.917545 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:00 crc kubenswrapper[4755]: I0202 22:35:00.917561 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:00Z","lastTransitionTime":"2026-02-02T22:35:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.020254 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.020302 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.020320 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.020342 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.020360 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:01Z","lastTransitionTime":"2026-02-02T22:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.026648 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 02:53:41.415721305 +0000 UTC Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.068285 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:35:01 crc kubenswrapper[4755]: E0202 22:35:01.068484 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.122531 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.122596 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.122616 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.122646 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.122664 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:01Z","lastTransitionTime":"2026-02-02T22:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.225550 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.225588 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.225600 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.225616 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.225627 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:01Z","lastTransitionTime":"2026-02-02T22:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.327946 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.328015 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.328034 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.328063 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.328082 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:01Z","lastTransitionTime":"2026-02-02T22:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.395604 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs\") pod \"network-metrics-daemon-k8tml\" (UID: \"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\") " pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:35:01 crc kubenswrapper[4755]: E0202 22:35:01.395795 4755 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 22:35:01 crc kubenswrapper[4755]: E0202 22:35:01.395870 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs podName:3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923 nodeName:}" failed. No retries permitted until 2026-02-02 22:35:17.395849321 +0000 UTC m=+73.087069677 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs") pod "network-metrics-daemon-k8tml" (UID: "3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.430580 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.430619 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.430630 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.430644 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.430653 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:01Z","lastTransitionTime":"2026-02-02T22:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.523326 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.523379 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.523396 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.523419 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.523437 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:01Z","lastTransitionTime":"2026-02-02T22:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.535224 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4mblb_ae78d89e-7970-49df-8839-b1b6d7de4ec1/ovnkube-controller/2.log" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.537186 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4mblb_ae78d89e-7970-49df-8839-b1b6d7de4ec1/ovnkube-controller/1.log" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.541308 4755 generic.go:334] "Generic (PLEG): container finished" podID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerID="f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b" exitCode=1 Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.541376 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerDied","Data":"f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b"} Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.541438 4755 scope.go:117] "RemoveContainer" containerID="33bcfe348b8d6076aef27610d63f553503678c9e7da145cc9fc2d9f93956e294" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.542518 4755 scope.go:117] "RemoveContainer" containerID="f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b" Feb 02 22:35:01 crc kubenswrapper[4755]: E0202 22:35:01.543083 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4mblb_openshift-ovn-kubernetes(ae78d89e-7970-49df-8839-b1b6d7de4ec1)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" Feb 02 22:35:01 crc kubenswrapper[4755]: E0202 22:35:01.547460 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee8805
1c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:01Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.552453 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.552496 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.552512 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.552535 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.552554 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:01Z","lastTransitionTime":"2026-02-02T22:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.562149 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e5c9d46-6238-45de-b6ed-f633e92b3728\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7aa1ae9141eb375e7a9daa30bed33432bee7b5e963bee04bc20e8f8da94a31d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\
\\"cri-o://4699e555a7f6d6a8245cd7efa09afc179d7a17903477c520a4aad929ab4bc218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6ljzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:01Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:01 crc kubenswrapper[4755]: E0202 22:35:01.574051 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:01Z is after 
2025-08-24T17:21:41Z" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.578604 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k8tml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k8tml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:01Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.579497 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.579540 4755 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.579555 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.579576 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.579591 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:01Z","lastTransitionTime":"2026-02-02T22:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.596498 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5dfdc9-93c3-442e-b9ac-1edbde8b5356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1537c30f4102532e8b02d31e92eb43916f7810a8b082d2b564c45d2c269fe8c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e11caaf9b5dd96ed1cf9718112971ce1e059fbde5ce0bd7e43c326515db990d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-di
r\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88e31bc33ec540bf5097cb85062e7681f619e80680c68169fd7a10c5086c3f13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:01Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:01 crc kubenswrapper[4755]: E0202 22:35:01.597278 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3
ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:01Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.601703 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.601812 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.601836 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.601903 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.601924 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:01Z","lastTransitionTime":"2026-02-02T22:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.617096 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:01Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:01 crc kubenswrapper[4755]: E0202 22:35:01.622013 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3
ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:01Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.630134 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.630190 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.630209 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.630237 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.630255 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:01Z","lastTransitionTime":"2026-02-02T22:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.632677 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:01Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:01 crc kubenswrapper[4755]: E0202 22:35:01.650930 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:01Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:01 crc kubenswrapper[4755]: E0202 22:35:01.651245 4755 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.653897 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.653939 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.653953 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.653972 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.653984 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:01Z","lastTransitionTime":"2026-02-02T22:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.654896 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:01Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.678777 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mou
ntPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\
\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33bcfe348b8d6076aef27610d63f553503678c9e7da145cc9fc2d9f93956e294\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:34:44Z\\\",\\\"message\\\":\\\"ecycle-manager/olm-operator-metrics]} name:Service_openshift-operator-lifecycle-manager/olm-operator-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.168:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {63b1440a-0908-4cab-8799-012fa1cf0b07}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 22:34:43.693285 6210 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver-operator/metrics]} name:Service_openshift-kube-apiserver-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.109:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {1f62a432-33b9-495d-83b2-d1dbe6961325}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e 
Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 22:34:43.693406 6210 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-console/downloads]} name:Service_openshift-console/downloads_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.213:80:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {2ead45b3-c313-4fbc-a7bc-2b3c4ffd610c}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 22:35:01.084987 6433 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-02-0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:35:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:01Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.696572 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:01Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.714506 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:01Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.730191 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:01Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.744065 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:01Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.755908 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.755960 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.755978 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.756002 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.756020 4755 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:01Z","lastTransitionTime":"2026-02-02T22:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.764415 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:01Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.787585 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:01Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.805919 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:01Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.824023 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:01Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.846307 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.1
26.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:01Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.858621 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.858673 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.858691 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.858719 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.858777 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:01Z","lastTransitionTime":"2026-02-02T22:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.962159 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.962221 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.962238 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.962262 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:01 crc kubenswrapper[4755]: I0202 22:35:01.962280 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:01Z","lastTransitionTime":"2026-02-02T22:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.027665 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 04:03:24.231727616 +0000 UTC Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.065526 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.065634 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.065652 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.065675 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.065719 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:02Z","lastTransitionTime":"2026-02-02T22:35:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.067869 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.067889 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.067978 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:35:02 crc kubenswrapper[4755]: E0202 22:35:02.068021 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:35:02 crc kubenswrapper[4755]: E0202 22:35:02.068185 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:35:02 crc kubenswrapper[4755]: E0202 22:35:02.068278 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.169079 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.169155 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.169178 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.169208 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.169231 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:02Z","lastTransitionTime":"2026-02-02T22:35:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.272137 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.272213 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.272239 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.272268 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.272293 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:02Z","lastTransitionTime":"2026-02-02T22:35:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.375501 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.375572 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.375590 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.375616 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.375634 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:02Z","lastTransitionTime":"2026-02-02T22:35:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.478188 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.478245 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.478265 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.478291 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.478462 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:02Z","lastTransitionTime":"2026-02-02T22:35:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.547683 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4mblb_ae78d89e-7970-49df-8839-b1b6d7de4ec1/ovnkube-controller/2.log" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.553761 4755 scope.go:117] "RemoveContainer" containerID="f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b" Feb 02 22:35:02 crc kubenswrapper[4755]: E0202 22:35:02.554011 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4mblb_openshift-ovn-kubernetes(ae78d89e-7970-49df-8839-b1b6d7de4ec1)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.578018 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"term
inated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0
dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:02Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.580915 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.580966 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.580984 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.581010 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.581027 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:02Z","lastTransitionTime":"2026-02-02T22:35:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.601043 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:02Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.620185 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:02Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.639295 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:02Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.659816 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:02Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.677435 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e5c9d46-6238-45de-b6ed-f633e92b3728\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7aa1ae9141eb375e7a9daa30bed33432bee7b5e963bee04bc20e8f8da94a31d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4699e555a7f6d6a8245cd7efa09afc179d7a17903477c520a4aad929ab4bc218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/e
nv\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6ljzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:02Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.684323 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.684371 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.684389 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.684414 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.684431 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:02Z","lastTransitionTime":"2026-02-02T22:35:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.707225 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-console/downloads]} name:Service_openshift-console/downloads_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.213:80:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {2ead45b3-c313-4fbc-a7bc-2b3c4ffd610c}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 22:35:01.084987 6433 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:35:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed 
container=ovnkube-controller pod=ovnkube-node-4mblb_openshift-ovn-kubernetes(ae78d89e-7970-49df-8839-b1b6d7de4ec1)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:02Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.723389 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k8tml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k8tml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:02Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.740604 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5dfdc9-93c3-442e-b9ac-1edbde8b5356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1537c30f4102532e8b02d31e92eb43916f7810a8b082d2b564c45d2c269fe8c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e11caaf9b5dd96ed1cf9718112971ce1e059fbde5ce0bd7e43c326515db990d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88e31bc33ec540bf5097cb85062e7681f619e80680c68169fd7a10c5086c3f13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:02Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.761615 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:02Z is after 
2025-08-24T17:21:41Z" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.779298 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:02Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.786840 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.786891 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.786908 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.786932 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.786950 4755 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:02Z","lastTransitionTime":"2026-02-02T22:35:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.799808 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube
rnetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:02Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.819660 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:02Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.841400 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:02Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.858791 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:02Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.873486 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:02Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.889473 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.889513 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.889530 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.889555 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.889572 4755 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:02Z","lastTransitionTime":"2026-02-02T22:35:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.992402 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.992462 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.992483 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.992507 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:02 crc kubenswrapper[4755]: I0202 22:35:02.992524 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:02Z","lastTransitionTime":"2026-02-02T22:35:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.028421 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 12:18:41.291489208 +0000 UTC Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.067866 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:35:03 crc kubenswrapper[4755]: E0202 22:35:03.068046 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.095378 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.095448 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.095471 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.095498 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.095521 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:03Z","lastTransitionTime":"2026-02-02T22:35:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.198837 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.198891 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.198908 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.198932 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.198949 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:03Z","lastTransitionTime":"2026-02-02T22:35:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.302810 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.302906 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.302942 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.302973 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.302995 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:03Z","lastTransitionTime":"2026-02-02T22:35:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.406121 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.406186 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.406210 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.406245 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.406270 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:03Z","lastTransitionTime":"2026-02-02T22:35:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.508673 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.508724 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.508780 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.508806 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.508829 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:03Z","lastTransitionTime":"2026-02-02T22:35:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.611484 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.611561 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.611587 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.611619 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.611637 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:03Z","lastTransitionTime":"2026-02-02T22:35:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.714328 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.714387 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.714408 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.714431 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.714449 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:03Z","lastTransitionTime":"2026-02-02T22:35:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.816725 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.816776 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.816789 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.816811 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.816821 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:03Z","lastTransitionTime":"2026-02-02T22:35:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.920459 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.920536 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.920560 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.920591 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:03 crc kubenswrapper[4755]: I0202 22:35:03.920817 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:03Z","lastTransitionTime":"2026-02-02T22:35:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.024013 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.024057 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.024075 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.024097 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.024114 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:04Z","lastTransitionTime":"2026-02-02T22:35:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.028708 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 20:12:12.76421612 +0000 UTC Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.068316 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.068364 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.068363 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:35:04 crc kubenswrapper[4755]: E0202 22:35:04.068463 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:35:04 crc kubenswrapper[4755]: E0202 22:35:04.068682 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:35:04 crc kubenswrapper[4755]: E0202 22:35:04.068786 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.126981 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.127040 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.127063 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.127090 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.127111 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:04Z","lastTransitionTime":"2026-02-02T22:35:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.230013 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.230064 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.230081 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.230104 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.230123 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:04Z","lastTransitionTime":"2026-02-02T22:35:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.332550 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.333004 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.333146 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.333397 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.333625 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:04Z","lastTransitionTime":"2026-02-02T22:35:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.436674 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.436776 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.436804 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.436831 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.436849 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:04Z","lastTransitionTime":"2026-02-02T22:35:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.539316 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.539380 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.539398 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.539421 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.539439 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:04Z","lastTransitionTime":"2026-02-02T22:35:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.642632 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.642706 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.642754 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.642781 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.642804 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:04Z","lastTransitionTime":"2026-02-02T22:35:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.746102 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.746183 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.746205 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.746230 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.746251 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:04Z","lastTransitionTime":"2026-02-02T22:35:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.849537 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.849947 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.850148 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.850311 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.850496 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:04Z","lastTransitionTime":"2026-02-02T22:35:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.953046 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.953427 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.953582 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.953844 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:04 crc kubenswrapper[4755]: I0202 22:35:04.954013 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:04Z","lastTransitionTime":"2026-02-02T22:35:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.029332 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 08:05:16.733999902 +0000 UTC Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.056595 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.056650 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.056669 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.056693 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.056711 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:05Z","lastTransitionTime":"2026-02-02T22:35:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.071286 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:35:05 crc kubenswrapper[4755]: E0202 22:35:05.071546 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.088562 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e5c9d46-6238-45de-b6ed-f633e92b3728\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7aa1ae9141eb375e7a9daa30bed33432bee7b5e963bee04bc20e8f8da94a31d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4699e555a7f6d6a8245cd7efa09afc179d7a17903477c520a4aad929ab4bc218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6ljzc\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:05Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.105916 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5dfdc9-93c3-442e-b9ac-1edbde8b5356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1537c30f4102532e8b02d31e92eb43916f7810a8b082d2b564c45d2c269fe8c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e11caaf9b5dd96ed1cf9718112971ce1e059fbde5ce0bd7e43c326515db990d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88e31bc33ec540bf5097cb85062e7681f619e80680c68169fd7a10c5086c3f13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,
\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:05Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.127327 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:05Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.149640 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:05Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.158820 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.158865 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.158881 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.158904 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.158962 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:05Z","lastTransitionTime":"2026-02-02T22:35:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.174348 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:05Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.206668 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/r
un/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-console/downloads]} name:Service_openshift-console/downloads_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.213:80:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {2ead45b3-c313-4fbc-a7bc-2b3c4ffd610c}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 22:35:01.084987 6433 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook 
\\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:35:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4mblb_openshift-ovn-kubernetes(ae78d89e-7970-49df-8839-b1b6d7de4ec1)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\
\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:05Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.224800 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k8tml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k8tml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:05Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.244218 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:05Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.262868 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.262958 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.262987 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.263027 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.263051 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:05Z","lastTransitionTime":"2026-02-02T22:35:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.263404 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:05Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.280912 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:05Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.298241 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:05Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.323344 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:05Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.344813 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:05Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.365748 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:05Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.366205 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.366787 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.366885 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.366913 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.367400 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:05Z","lastTransitionTime":"2026-02-02T22:35:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.385696 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:05Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.410236 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:05Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.470987 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.471044 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:05 crc 
kubenswrapper[4755]: I0202 22:35:05.471062 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.471086 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.471103 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:05Z","lastTransitionTime":"2026-02-02T22:35:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.573676 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.573762 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.573780 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.573803 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.573820 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:05Z","lastTransitionTime":"2026-02-02T22:35:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.677131 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.677191 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.677209 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.677235 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.677261 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:05Z","lastTransitionTime":"2026-02-02T22:35:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.781353 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.781420 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.781444 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.781473 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.781499 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:05Z","lastTransitionTime":"2026-02-02T22:35:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.884153 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.884216 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.884239 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.884266 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.884288 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:05Z","lastTransitionTime":"2026-02-02T22:35:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.986591 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.986645 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.986662 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.986687 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:05 crc kubenswrapper[4755]: I0202 22:35:05.986704 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:05Z","lastTransitionTime":"2026-02-02T22:35:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.030328 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 16:10:30.146332845 +0000 UTC
Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.068373 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.068421 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.068453 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 22:35:06 crc kubenswrapper[4755]: E0202 22:35:06.068563 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 22:35:06 crc kubenswrapper[4755]: E0202 22:35:06.068681 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 22:35:06 crc kubenswrapper[4755]: E0202 22:35:06.068883 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.089831 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.089889 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.089906 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.089926 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.089942 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:06Z","lastTransitionTime":"2026-02-02T22:35:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.193386 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.193451 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.193470 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.193496 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.193516 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:06Z","lastTransitionTime":"2026-02-02T22:35:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.297386 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.297455 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.297485 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.297508 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.297525 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:06Z","lastTransitionTime":"2026-02-02T22:35:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.401368 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.401442 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.401461 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.401487 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.401504 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:06Z","lastTransitionTime":"2026-02-02T22:35:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.504944 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.505033 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.505058 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.505088 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.505114 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:06Z","lastTransitionTime":"2026-02-02T22:35:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.607605 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.607956 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.608004 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.608032 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.608071 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:06Z","lastTransitionTime":"2026-02-02T22:35:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.712205 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.712304 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.712322 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.712346 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.712364 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:06Z","lastTransitionTime":"2026-02-02T22:35:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.816067 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.816163 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.816189 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.816213 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.816230 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:06Z","lastTransitionTime":"2026-02-02T22:35:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.920160 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.920215 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.920232 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.920288 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:06 crc kubenswrapper[4755]: I0202 22:35:06.920309 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:06Z","lastTransitionTime":"2026-02-02T22:35:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.023204 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.023290 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.023316 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.023343 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.023362 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:07Z","lastTransitionTime":"2026-02-02T22:35:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.031340 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 14:24:58.208645343 +0000 UTC
Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.068770 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml"
Feb 02 22:35:07 crc kubenswrapper[4755]: E0202 22:35:07.068985 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923"
Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.127121 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.127235 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.127263 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.127342 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.127370 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:07Z","lastTransitionTime":"2026-02-02T22:35:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.231950 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.232004 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.232022 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.232050 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.232068 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:07Z","lastTransitionTime":"2026-02-02T22:35:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.334979 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.335032 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.335049 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.335072 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.335091 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:07Z","lastTransitionTime":"2026-02-02T22:35:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.437387 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.437449 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.437466 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.437491 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.437509 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:07Z","lastTransitionTime":"2026-02-02T22:35:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.540096 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.540150 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.540159 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.540172 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.540197 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:07Z","lastTransitionTime":"2026-02-02T22:35:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.642588 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.642654 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.642672 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.642696 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.642714 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:07Z","lastTransitionTime":"2026-02-02T22:35:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.745928 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.746218 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.746250 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.746275 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.746295 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:07Z","lastTransitionTime":"2026-02-02T22:35:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.849329 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.849423 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.849449 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.849473 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.849490 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:07Z","lastTransitionTime":"2026-02-02T22:35:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.951849 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.951908 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.951932 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.951960 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:07 crc kubenswrapper[4755]: I0202 22:35:07.951980 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:07Z","lastTransitionTime":"2026-02-02T22:35:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.032541 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 17:46:11.423361466 +0000 UTC
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.054988 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.055047 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.055070 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.055099 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.055120 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:08Z","lastTransitionTime":"2026-02-02T22:35:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.068370 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.068420 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.068456 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 22:35:08 crc kubenswrapper[4755]: E0202 22:35:08.068543 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 22:35:08 crc kubenswrapper[4755]: E0202 22:35:08.068667 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 22:35:08 crc kubenswrapper[4755]: E0202 22:35:08.068808 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.157790 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.157849 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.157868 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.157897 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.157915 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:08Z","lastTransitionTime":"2026-02-02T22:35:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.260950 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.261033 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.261053 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.261077 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.261095 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:08Z","lastTransitionTime":"2026-02-02T22:35:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.363795 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.363857 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.363875 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.363899 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.363918 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:08Z","lastTransitionTime":"2026-02-02T22:35:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.467256 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.467332 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.467352 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.467378 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.467395 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:08Z","lastTransitionTime":"2026-02-02T22:35:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.570648 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.570698 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.570716 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.570763 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.570781 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:08Z","lastTransitionTime":"2026-02-02T22:35:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.672914 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.672974 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.672994 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.673017 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.673034 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:08Z","lastTransitionTime":"2026-02-02T22:35:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.775656 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.775694 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.775704 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.775721 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.775758 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:08Z","lastTransitionTime":"2026-02-02T22:35:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.877866 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.877928 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.877946 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.877968 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.877984 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:08Z","lastTransitionTime":"2026-02-02T22:35:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.980262 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.980307 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.980346 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.980361 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:08 crc kubenswrapper[4755]: I0202 22:35:08.980370 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:08Z","lastTransitionTime":"2026-02-02T22:35:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.033393 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 02:11:50.523488411 +0000 UTC Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.068920 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:35:09 crc kubenswrapper[4755]: E0202 22:35:09.069100 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.083522 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.083575 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.083593 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.083616 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.083632 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:09Z","lastTransitionTime":"2026-02-02T22:35:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.186117 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.186157 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.186166 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.186181 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.186191 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:09Z","lastTransitionTime":"2026-02-02T22:35:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.288790 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.288853 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.288874 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.288900 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.288918 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:09Z","lastTransitionTime":"2026-02-02T22:35:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.391649 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.391715 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.391880 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.391905 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.391923 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:09Z","lastTransitionTime":"2026-02-02T22:35:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.494900 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.494965 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.494983 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.495008 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.495024 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:09Z","lastTransitionTime":"2026-02-02T22:35:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.597419 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.597482 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.597500 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.597526 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.597543 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:09Z","lastTransitionTime":"2026-02-02T22:35:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.699876 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.699930 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.699948 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.699971 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.699987 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:09Z","lastTransitionTime":"2026-02-02T22:35:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.802392 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.802436 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.802452 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.802472 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.802488 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:09Z","lastTransitionTime":"2026-02-02T22:35:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.905225 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.905304 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.905323 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.905351 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:09 crc kubenswrapper[4755]: I0202 22:35:09.905368 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:09Z","lastTransitionTime":"2026-02-02T22:35:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.008008 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.008062 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.008080 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.008103 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.008119 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:10Z","lastTransitionTime":"2026-02-02T22:35:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.033761 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 03:17:32.735794906 +0000 UTC Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.068302 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.068352 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.068317 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:35:10 crc kubenswrapper[4755]: E0202 22:35:10.068458 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:35:10 crc kubenswrapper[4755]: E0202 22:35:10.068597 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:35:10 crc kubenswrapper[4755]: E0202 22:35:10.068698 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.111200 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.111264 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.111283 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.111307 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.111326 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:10Z","lastTransitionTime":"2026-02-02T22:35:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.213549 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.213580 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.213589 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.213601 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.213609 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:10Z","lastTransitionTime":"2026-02-02T22:35:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.316058 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.316092 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.316102 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.316116 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.316124 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:10Z","lastTransitionTime":"2026-02-02T22:35:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.418313 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.418347 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.418355 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.418369 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.418378 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:10Z","lastTransitionTime":"2026-02-02T22:35:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.520408 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.520460 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.520468 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.520481 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.520493 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:10Z","lastTransitionTime":"2026-02-02T22:35:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.623318 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.623390 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.623408 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.623432 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.623449 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:10Z","lastTransitionTime":"2026-02-02T22:35:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.725901 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.725929 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.725937 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.725951 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.725959 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:10Z","lastTransitionTime":"2026-02-02T22:35:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.827894 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.827934 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.827949 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.827963 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.827973 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:10Z","lastTransitionTime":"2026-02-02T22:35:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.930573 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.930611 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.930620 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.930634 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:10 crc kubenswrapper[4755]: I0202 22:35:10.930643 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:10Z","lastTransitionTime":"2026-02-02T22:35:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.032644 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.032714 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.032783 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.032850 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.032872 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:11Z","lastTransitionTime":"2026-02-02T22:35:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.034931 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 21:01:29.493293382 +0000 UTC Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.069125 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:35:11 crc kubenswrapper[4755]: E0202 22:35:11.069329 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.135191 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.135282 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.135296 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.135312 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.135324 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:11Z","lastTransitionTime":"2026-02-02T22:35:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.238494 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.238529 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.238541 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.238563 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.238577 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:11Z","lastTransitionTime":"2026-02-02T22:35:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.350178 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.350350 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.350380 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.350415 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.350453 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:11Z","lastTransitionTime":"2026-02-02T22:35:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.453869 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.453938 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.453963 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.453995 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.454020 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:11Z","lastTransitionTime":"2026-02-02T22:35:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.555825 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.555858 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.555868 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.555881 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.555890 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:11Z","lastTransitionTime":"2026-02-02T22:35:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.658012 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.658067 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.658086 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.658107 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.658123 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:11Z","lastTransitionTime":"2026-02-02T22:35:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.761038 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.761146 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.761163 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.761227 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.761243 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:11Z","lastTransitionTime":"2026-02-02T22:35:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.863942 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.863988 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.864000 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.864016 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.864029 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:11Z","lastTransitionTime":"2026-02-02T22:35:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.966894 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.966935 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.966947 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.966961 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:11 crc kubenswrapper[4755]: I0202 22:35:11.966974 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:11Z","lastTransitionTime":"2026-02-02T22:35:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.035314 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 14:23:38.323323532 +0000 UTC Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.040862 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.040931 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.040960 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.040988 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.041006 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:12Z","lastTransitionTime":"2026-02-02T22:35:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:12 crc kubenswrapper[4755]: E0202 22:35:12.055403 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:12Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.059511 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.059565 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.059585 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.059617 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.059637 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:12Z","lastTransitionTime":"2026-02-02T22:35:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.068432 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.068499 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:35:12 crc kubenswrapper[4755]: E0202 22:35:12.068529 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.068536 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:35:12 crc kubenswrapper[4755]: E0202 22:35:12.068646 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:35:12 crc kubenswrapper[4755]: E0202 22:35:12.068909 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:35:12 crc kubenswrapper[4755]: E0202 22:35:12.072847 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:12Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.077207 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.077268 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
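Annotation: each failed patch is followed by the same event and condition bookkeeping. The condition object that setters.go prints has a small, stable JSON shape; a standalone Go sketch (plain structs standing in for the k8s.io/api types, fed a shortened copy of the Ready condition from this log) showing how it parses:

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// NodeCondition mirrors the fields of the condition object logged by
// setters.go above.
type NodeCondition struct {
	Type               string    `json:"type"`
	Status             string    `json:"status"`
	LastHeartbeatTime  time.Time `json:"lastHeartbeatTime"`
	LastTransitionTime time.Time `json:"lastTransitionTime"`
	Reason             string    `json:"reason"`
	Message            string    `json:"message"`
}

func main() {
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:12Z","lastTransitionTime":"2026-02-02T22:35:12Z","reason":"KubeletNotReady","message":"container runtime network not ready"}`
	var c NodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		panic(err)
	}
	fmt.Printf("%s=%s since %s (%s)\n", c.Type, c.Status,
		c.LastTransitionTime.Format(time.RFC3339), c.Reason)
}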
event="NodeHasNoDiskPressure" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.077285 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.077693 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.077792 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:12Z","lastTransitionTime":"2026-02-02T22:35:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:12 crc kubenswrapper[4755]: E0202 22:35:12.096798 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:12Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.101214 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.101262 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
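Annotation: the errors at 22:35:12.072847 and 22:35:12.096798 above are retries within a single status sync; kubelet attempts the patch a bounded number of times (five per sync is the upstream default, the nodeStatusUpdateRetry constant) before giving up with "update node status exceeds retry count", which appears below at 22:35:12.146254. A standalone sketch of that bounded loop, assuming the upstream default of five:

package main

import (
	"errors"
	"fmt"
)

// nodeStatusUpdateRetry mirrors the upstream kubelet default of five
// attempts per sync before the sync is abandoned.
const nodeStatusUpdateRetry = 5

func updateNodeStatus(patch func() error) error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := patch(); err != nil {
			fmt.Printf("Error updating node status, will retry: %v\n", err)
			continue
		}
		return nil
	}
	return errors.New("update node status exceeds retry count")
}

func main() {
	// Stand-in for the webhook rejection seen in this log.
	webhookErr := errors.New(`failed calling webhook "node.network-node-identity.openshift.io": x509: certificate has expired`)
	if err := updateNodeStatus(func() error { return webhookErr }); err != nil {
		fmt.Println(err)
	}
}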
event="NodeHasNoDiskPressure" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.101279 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.101300 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.101316 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:12Z","lastTransitionTime":"2026-02-02T22:35:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:12 crc kubenswrapper[4755]: E0202 22:35:12.125652 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:12Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.130133 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.130230 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
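Annotation: the NotReady condition itself comes from the runtime's network check: the CRI reports NetworkReady=false until a CNI configuration file shows up in /etc/kubernetes/cni/net.d/. A rough standalone approximation of that probe (the real check lives in the container runtime; this sketch only scans the directory named in the log):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // path reported in the log
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Println("NetworkReady=false:", err)
		return
	}
	// CNI is considered configured once a .conf/.conflist/.json file exists.
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Println("NetworkReady=true, found", e.Name())
			return
		}
	}
	fmt.Printf("NetworkReady=false: no CNI configuration file in %s\n", dir)
}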
event="NodeHasNoDiskPressure" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.130248 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.130267 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.130280 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:12Z","lastTransitionTime":"2026-02-02T22:35:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:12 crc kubenswrapper[4755]: E0202 22:35:12.145906 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:12Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:12 crc kubenswrapper[4755]: E0202 22:35:12.146254 4755 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.148284 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.148349 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.148374 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.148404 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.148427 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:12Z","lastTransitionTime":"2026-02-02T22:35:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.251464 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.251552 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.251969 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.252033 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.252055 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:12Z","lastTransitionTime":"2026-02-02T22:35:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.355490 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.355758 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.355859 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.355942 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.356020 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:12Z","lastTransitionTime":"2026-02-02T22:35:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.458419 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.458478 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.458490 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.458506 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.458517 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:12Z","lastTransitionTime":"2026-02-02T22:35:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.560158 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.560202 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.560213 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.560229 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.560240 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:12Z","lastTransitionTime":"2026-02-02T22:35:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.662108 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.662140 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.662152 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.662167 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.662179 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:12Z","lastTransitionTime":"2026-02-02T22:35:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.764956 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.765015 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.765034 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.765060 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.765078 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:12Z","lastTransitionTime":"2026-02-02T22:35:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.868036 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.868434 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.868580 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.868777 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.868938 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:12Z","lastTransitionTime":"2026-02-02T22:35:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.972917 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.972947 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.972957 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.972969 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:12 crc kubenswrapper[4755]: I0202 22:35:12.972978 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:12Z","lastTransitionTime":"2026-02-02T22:35:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.035935 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 04:30:01.006861319 +0000 UTC
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.068656 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml"
Feb 02 22:35:13 crc kubenswrapper[4755]: E0202 22:35:13.068772 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.077595 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.077656 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.077676 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.077702 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.077720 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:13Z","lastTransitionTime":"2026-02-02T22:35:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.179970 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.180026 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.180044 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.180065 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.180083 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:13Z","lastTransitionTime":"2026-02-02T22:35:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
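The repeating NodeNotReady condition has the single root cause the message spells out: the container runtime reports NetworkReady=false because no CNI configuration file exists yet in /etc/kubernetes/cni/net.d/ (ovnkube-controller, which would write it, is crash-looping on the expired webhook certificate, as the pod status later in this log shows). The readiness test amounts to a scan of that directory; a sketch under that assumption, using the file extensions CNI loaders conventionally accept:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether any CNI network configuration exists in dir.
// CNI config loaders accept .conf, .conflist and .json files.
func hasCNIConfig(dir string) bool {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false // a missing directory counts as "no configuration"
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true
		}
	}
	return false
}

func main() {
	dir := "/etc/kubernetes/cni/net.d"
	fmt.Println(dir, "has CNI config:", hasCNIConfig(dir))
}

Until a .conf or .conflist file appears there, every pod that needs a sandbox network keeps failing to sync, which is exactly what the pod_workers entries record.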
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.281800 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.281844 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.281853 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.281869 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.281879 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:13Z","lastTransitionTime":"2026-02-02T22:35:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.384899 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.384936 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.384944 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.384957 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.384966 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:13Z","lastTransitionTime":"2026-02-02T22:35:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.487848 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.487911 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.487930 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.487956 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.487972 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:13Z","lastTransitionTime":"2026-02-02T22:35:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.590085 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.590146 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.590169 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.590196 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.590218 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:13Z","lastTransitionTime":"2026-02-02T22:35:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.692771 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.692802 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.692810 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.692823 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.692831 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:13Z","lastTransitionTime":"2026-02-02T22:35:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.795225 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.795282 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.795300 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.795323 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.795340 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:13Z","lastTransitionTime":"2026-02-02T22:35:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.897888 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.897939 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.897955 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.897979 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:13 crc kubenswrapper[4755]: I0202 22:35:13.898001 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:13Z","lastTransitionTime":"2026-02-02T22:35:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.000795 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.000828 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.000839 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.000853 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.000864 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:14Z","lastTransitionTime":"2026-02-02T22:35:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.036402 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 13:21:43.800881193 +0000 UTC
Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.068825 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.068851 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.068845 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
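The certificate_manager lines are unrelated to the outage but easy to misread: the kubelet-serving certificate is valid until 2026-02-24, yet each pass prints a different rotation deadline (2025-11-28 above, 2026-01-15 here, 2025-12-27 below). That is expected behaviour: client-go's certificate manager re-draws a jittered deadline at roughly 70-90% of the certificate's lifetime each time rotation is evaluated. A rough sketch of that scheduling (the 0.7/0.2 factors and the one-year lifetime are assumptions for illustration, not the exact upstream constants):

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a random point in the final 10-30% of the
// certificate's validity window, approximating how
// k8s.io/client-go/util/certificate schedules rotation.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	notAfter, _ := time.Parse(time.RFC3339, "2026-02-24T05:53:03Z")
	notBefore := notAfter.AddDate(-1, 0, 0) // assumed one-year lifetime
	for i := 0; i < 3; i++ {
		// Each evaluation yields a different deadline, as in the log.
		fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter))
	}
}

All three deadlines in the log fall inside that final window, so the varying dates indicate re-randomization, not a scheduling bug.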
Feb 02 22:35:14 crc kubenswrapper[4755]: E0202 22:35:14.068996 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 22:35:14 crc kubenswrapper[4755]: E0202 22:35:14.069108 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 22:35:14 crc kubenswrapper[4755]: E0202 22:35:14.069217 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.103593 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.103645 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.103665 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.103691 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.103709 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:14Z","lastTransitionTime":"2026-02-02T22:35:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.206256 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.206305 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.206315 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.206332 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.206341 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:14Z","lastTransitionTime":"2026-02-02T22:35:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.308511 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.308552 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.308560 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.308573 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.308582 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:14Z","lastTransitionTime":"2026-02-02T22:35:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.410362 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.410407 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.410418 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.410435 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.410445 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:14Z","lastTransitionTime":"2026-02-02T22:35:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.512627 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.512666 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.512678 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.512696 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.512708 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:14Z","lastTransitionTime":"2026-02-02T22:35:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.614659 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.614704 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.614717 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.614747 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.614758 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:14Z","lastTransitionTime":"2026-02-02T22:35:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.716122 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.716156 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.716166 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.716179 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.716203 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:14Z","lastTransitionTime":"2026-02-02T22:35:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.818915 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.818963 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.818975 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.818992 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.819002 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:14Z","lastTransitionTime":"2026-02-02T22:35:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.921285 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.921325 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.921336 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.921354 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:14 crc kubenswrapper[4755]: I0202 22:35:14.921366 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:14Z","lastTransitionTime":"2026-02-02T22:35:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.023347 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.023468 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.023492 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.023520 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.023541 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:15Z","lastTransitionTime":"2026-02-02T22:35:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.036549 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 15:19:12.42440305 +0000 UTC Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.068382 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:35:15 crc kubenswrapper[4755]: E0202 22:35:15.068533 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.082501 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-02-02T22:35:15Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.094323 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:15Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.112232 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},
{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-console/downloads]} name:Service_openshift-console/downloads_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.213:80:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {2ead45b3-c313-4fbc-a7bc-2b3c4ffd610c}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 22:35:01.084987 6433 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call 
webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:35:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4mblb_openshift-ovn-kubernetes(ae78d89e-7970-49df-8839-b1b6d7de4ec1)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\
":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:15Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.122051 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k8tml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k8tml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:15Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.125427 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.125483 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.125506 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.125534 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.125557 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:15Z","lastTransitionTime":"2026-02-02T22:35:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.133668 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5dfdc9-93c3-442e-b9ac-1edbde8b5356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1537c30f4102532e8b02d31e92eb43916f7810a8b082d2b564c45d2c269fe8c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e11caaf9b5dd96ed1cf9718112971ce1e059fbde5ce0bd7e43c326515db990d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88e31bc33ec540bf5097cb85062e7681f619e80680c68169fd7a10c5086c3f13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:15Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.147923 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:15Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.163946 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:15Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.176286 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:15Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.186861 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:15Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.198809 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:15Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.216487 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:15Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.228063 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:15Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.228927 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.228959 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.228970 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.228986 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.228997 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:15Z","lastTransitionTime":"2026-02-02T22:35:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.242139 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:15Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.254523 4755 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d\\
\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:15Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.270197 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:15Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.280474 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e5c9d46-6238-45de-b6ed-f633e92b3728\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7aa1ae9141eb375e7a9daa30bed33432bee7b5e963bee04bc20e8f8da94a31d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4699e555a7f6d6a8245cd7efa09afc179d7a17903477c520a4aad929ab4bc218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6ljzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:15Z is after 2025-08-24T17:21:41Z" Feb 02 
22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.331075 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.331124 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.331136 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.331151 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.331456 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:15Z","lastTransitionTime":"2026-02-02T22:35:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.434007 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.434079 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.434101 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.434132 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.434155 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:15Z","lastTransitionTime":"2026-02-02T22:35:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.536534 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.536772 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.536923 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.537258 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.537322 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:15Z","lastTransitionTime":"2026-02-02T22:35:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.639743 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.639784 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.639793 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.639810 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.639820 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:15Z","lastTransitionTime":"2026-02-02T22:35:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.742542 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.742605 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.742620 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.742636 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.742646 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:15Z","lastTransitionTime":"2026-02-02T22:35:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.846180 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.846240 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.846259 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.846281 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.846301 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:15Z","lastTransitionTime":"2026-02-02T22:35:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.948758 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.948812 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.948829 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.948853 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:15 crc kubenswrapper[4755]: I0202 22:35:15.948870 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:15Z","lastTransitionTime":"2026-02-02T22:35:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.036874 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 12:14:59.730766485 +0000 UTC Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.050667 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.050710 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.050760 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.050785 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.050801 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:16Z","lastTransitionTime":"2026-02-02T22:35:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.068386 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:35:16 crc kubenswrapper[4755]: E0202 22:35:16.068524 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.068535 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:35:16 crc kubenswrapper[4755]: E0202 22:35:16.068843 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.068995 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.069105 4755 scope.go:117] "RemoveContainer" containerID="f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b" Feb 02 22:35:16 crc kubenswrapper[4755]: E0202 22:35:16.069465 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:35:16 crc kubenswrapper[4755]: E0202 22:35:16.069796 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4mblb_openshift-ovn-kubernetes(ae78d89e-7970-49df-8839-b1b6d7de4ec1)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.152511 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.152542 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.152552 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.152566 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.152578 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:16Z","lastTransitionTime":"2026-02-02T22:35:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.255178 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.255214 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.255222 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.255236 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.255245 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:16Z","lastTransitionTime":"2026-02-02T22:35:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.357232 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.357277 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.357296 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.357320 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.357335 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:16Z","lastTransitionTime":"2026-02-02T22:35:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.458935 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.459000 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.459019 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.459042 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.459059 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:16Z","lastTransitionTime":"2026-02-02T22:35:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.562136 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.562272 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.562380 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.562499 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.562586 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:16Z","lastTransitionTime":"2026-02-02T22:35:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.664458 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.664479 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.664486 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.664498 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.664507 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:16Z","lastTransitionTime":"2026-02-02T22:35:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.767381 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.767438 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.767457 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.767479 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.767497 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:16Z","lastTransitionTime":"2026-02-02T22:35:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.870316 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.870648 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.870850 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.870998 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.871127 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:16Z","lastTransitionTime":"2026-02-02T22:35:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.974256 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.974337 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.974348 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.974363 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:16 crc kubenswrapper[4755]: I0202 22:35:16.974373 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:16Z","lastTransitionTime":"2026-02-02T22:35:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.038022 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 23:02:51.274097612 +0000 UTC Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.068878 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:35:17 crc kubenswrapper[4755]: E0202 22:35:17.069266 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.076654 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.076696 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.076709 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.076740 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.076752 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:17Z","lastTransitionTime":"2026-02-02T22:35:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.179066 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.179105 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.179114 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.179128 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.179136 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:17Z","lastTransitionTime":"2026-02-02T22:35:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.281400 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.281439 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.281451 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.281465 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.281475 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:17Z","lastTransitionTime":"2026-02-02T22:35:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.383961 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.384274 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.384409 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.384536 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.384655 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:17Z","lastTransitionTime":"2026-02-02T22:35:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.471374 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs\") pod \"network-metrics-daemon-k8tml\" (UID: \"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\") " pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:35:17 crc kubenswrapper[4755]: E0202 22:35:17.471498 4755 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 22:35:17 crc kubenswrapper[4755]: E0202 22:35:17.471552 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs podName:3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923 nodeName:}" failed. No retries permitted until 2026-02-02 22:35:49.471536033 +0000 UTC m=+105.162756359 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs") pod "network-metrics-daemon-k8tml" (UID: "3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.487201 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.487251 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.487261 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.487275 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.487287 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:17Z","lastTransitionTime":"2026-02-02T22:35:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.589977 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.590035 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.590052 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.590074 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.590090 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:17Z","lastTransitionTime":"2026-02-02T22:35:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.693238 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.693318 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.693343 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.693375 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.693399 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:17Z","lastTransitionTime":"2026-02-02T22:35:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.796429 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.796479 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.796491 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.796507 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.796520 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:17Z","lastTransitionTime":"2026-02-02T22:35:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.898282 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.898352 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.898376 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.898404 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:17 crc kubenswrapper[4755]: I0202 22:35:17.898427 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:17Z","lastTransitionTime":"2026-02-02T22:35:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.001310 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.001352 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.001363 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.001379 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.001388 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:18Z","lastTransitionTime":"2026-02-02T22:35:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.038770 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 01:10:10.14995786 +0000 UTC Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.068263 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.068331 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.068331 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:35:18 crc kubenswrapper[4755]: E0202 22:35:18.068383 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:35:18 crc kubenswrapper[4755]: E0202 22:35:18.068500 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:35:18 crc kubenswrapper[4755]: E0202 22:35:18.068570 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.104204 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.104505 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.104648 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.104857 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.105053 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:18Z","lastTransitionTime":"2026-02-02T22:35:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.207942 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.208015 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.208040 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.208068 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.208091 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:18Z","lastTransitionTime":"2026-02-02T22:35:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.310874 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.310942 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.310962 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.310986 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.311006 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:18Z","lastTransitionTime":"2026-02-02T22:35:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.413905 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.413957 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.413974 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.413996 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.414012 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:18Z","lastTransitionTime":"2026-02-02T22:35:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.516012 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.516364 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.516507 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.516642 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.516799 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:18Z","lastTransitionTime":"2026-02-02T22:35:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.618833 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.618868 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.618880 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.618895 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.618906 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:18Z","lastTransitionTime":"2026-02-02T22:35:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.721012 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.721049 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.721058 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.721071 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.721079 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:18Z","lastTransitionTime":"2026-02-02T22:35:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.823203 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.823258 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.823276 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.823300 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.823318 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:18Z","lastTransitionTime":"2026-02-02T22:35:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.925826 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.925879 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.925898 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.925921 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:18 crc kubenswrapper[4755]: I0202 22:35:18.925941 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:18Z","lastTransitionTime":"2026-02-02T22:35:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.028856 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.028916 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.028935 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.028958 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.028978 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:19Z","lastTransitionTime":"2026-02-02T22:35:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.039358 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 15:31:21.370699064 +0000 UTC
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.068873 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml"
Feb 02 22:35:19 crc kubenswrapper[4755]: E0202 22:35:19.069208 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.132038 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.132106 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.132128 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.132155 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.132175 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:19Z","lastTransitionTime":"2026-02-02T22:35:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.234831 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.234860 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.234869 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.234882 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.234891 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:19Z","lastTransitionTime":"2026-02-02T22:35:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.337937 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.337968 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.337977 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.337990 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.338000 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:19Z","lastTransitionTime":"2026-02-02T22:35:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.440980 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.441048 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.441067 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.441091 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.441107 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:19Z","lastTransitionTime":"2026-02-02T22:35:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.544062 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.544110 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.544127 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.544149 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.544166 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:19Z","lastTransitionTime":"2026-02-02T22:35:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.605692 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5fdlw_c206b6fd-200d-47ea-88a5-453f3093c749/kube-multus/0.log"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.605845 4755 generic.go:334] "Generic (PLEG): container finished" podID="c206b6fd-200d-47ea-88a5-453f3093c749" containerID="fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575" exitCode=1
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.605907 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5fdlw" event={"ID":"c206b6fd-200d-47ea-88a5-453f3093c749","Type":"ContainerDied","Data":"fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575"}
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.608158 4755 scope.go:117] "RemoveContainer" containerID="fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.625018 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e5c9d46-6238-45de-b6ed-f633e92b3728\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7aa1ae9141eb375e7a9daa30bed33432bee7b5e963bee04bc20e8f8da94a31d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4699e555a7f6d6a8245cd7efa09afc179d7a17903477c520a4aad929ab4bc218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6ljzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:19Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.641471 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5dfdc9-93c3-442e-b9ac-1edbde8b5356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1537c30f4102532e8b02d31e92eb43916f7810a8b082d2b564c45d2c269fe8c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e11caaf9b5dd96ed1cf9718112971ce1e059fbde5ce0bd7e43c326515db990d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\
",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88e31bc33ec540bf5097cb85062e7681f619e80680c68169fd7a10c5086c3f13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:19Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.647860 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.647910 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.647928 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.647952 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.647969 4755 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:19Z","lastTransitionTime":"2026-02-02T22:35:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.662868 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:19Z is after 2025-08-24T17:21:41Z"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.677385 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:19Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.694168 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:35:18Z\\\",\\\"message\\\":\\\"2026-02-02T22:34:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_9d7d7869-ac77-4cdc-9536-228460d3040e\\\\n2026-02-02T22:34:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_9d7d7869-ac77-4cdc-9536-228460d3040e to /host/opt/cni/bin/\\\\n2026-02-02T22:34:33Z [verbose] multus-daemon started\\\\n2026-02-02T22:34:33Z [verbose] Readiness Indicator file check\\\\n2026-02-02T22:35:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the 
condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:19Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.726610 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-console/downloads]} name:Service_openshift-console/downloads_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.213:80:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {2ead45b3-c313-4fbc-a7bc-2b3c4ffd610c}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 22:35:01.084987 6433 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:35:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4mblb_openshift-ovn-kubernetes(ae78d89e-7970-49df-8839-b1b6d7de4ec1)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:19Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.740550 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k8tml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k8tml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:19Z is after 2025-08-24T17:21:41Z"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.752062 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.752107 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.752122 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.752145 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.752178 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:19Z","lastTransitionTime":"2026-02-02T22:35:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.760341 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:19Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.773909 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:19Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.785547 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:19Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.796401 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:19Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.809366 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:19Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.821031 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:19Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.833463 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:19Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.847635 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:19Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.855004 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.855056 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.855070 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.855092 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.855106 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:19Z","lastTransitionTime":"2026-02-02T22:35:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.862645 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:19Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.958671 4755 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.958714 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.958739 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.958760 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:19 crc kubenswrapper[4755]: I0202 22:35:19.958772 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:19Z","lastTransitionTime":"2026-02-02T22:35:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.039917 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 22:55:24.702598027 +0000 UTC Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.061781 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.061836 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.061854 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.061879 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.061898 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:20Z","lastTransitionTime":"2026-02-02T22:35:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.068322 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.068370 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.068451 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:35:20 crc kubenswrapper[4755]: E0202 22:35:20.068669 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:35:20 crc kubenswrapper[4755]: E0202 22:35:20.068806 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:35:20 crc kubenswrapper[4755]: E0202 22:35:20.069008 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.164837 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.164895 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.164913 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.164936 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.164952 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:20Z","lastTransitionTime":"2026-02-02T22:35:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.267674 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.267760 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.267788 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.267816 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.267832 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:20Z","lastTransitionTime":"2026-02-02T22:35:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.370086 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.370144 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.370162 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.370186 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.370203 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:20Z","lastTransitionTime":"2026-02-02T22:35:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.473792 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.473873 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.473907 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.473939 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.473959 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:20Z","lastTransitionTime":"2026-02-02T22:35:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.576324 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.576382 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.576401 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.576429 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.576447 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:20Z","lastTransitionTime":"2026-02-02T22:35:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.612475 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5fdlw_c206b6fd-200d-47ea-88a5-453f3093c749/kube-multus/0.log" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.612552 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5fdlw" event={"ID":"c206b6fd-200d-47ea-88a5-453f3093c749","Type":"ContainerStarted","Data":"9261f8876de2e09d0918c8008e1c4bde12d29318d43a80e47aa7d6e6cfc35b2b"} Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.637170 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\
\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:20Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.657315 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:20Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.675687 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:20Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.678523 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.678564 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.678581 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.678606 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.678624 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:20Z","lastTransitionTime":"2026-02-02T22:35:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.696464 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:20Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.722368 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:20Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.741650 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e5c9d46-6238-45de-b6ed-f633e92b3728\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7aa1ae9141eb375e7a9daa30bed33432bee7b5e963bee04bc20e8f8da94a31d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4699e555a7f6d6a8245cd7efa09afc179d7a17903477c520a4aad929ab4bc218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6ljzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:20Z is after 2025-08-24T17:21:41Z" Feb 02 
22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.760196 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k8tml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k8tml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:20Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.780379 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5dfdc9-93c3-442e-b9ac-1edbde8b5356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1537c30f4102532e8b02d31e92eb43916f7810a8b082d2b564c45d2c269fe8c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e11caaf9b5dd96ed1cf9718112971ce1e059fbde5ce0bd7e43c326515db990d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88e31bc33ec540bf5097cb85062e7681f619e80680c68169fd7a10c5086c3f13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:20Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.785922 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.785987 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.786007 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.786106 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.786123 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:20Z","lastTransitionTime":"2026-02-02T22:35:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.806584 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:20Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.824086 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:20Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.845880 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9261f8876de2e09d0918c8008e1c4bde12d29318d43a80e47aa7d6e6cfc35b2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:35:18Z\\\",\\\"message\\\":\\\"2026-02-02T22:34:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_9d7d7869-ac77-4cdc-9536-228460d3040e\\\\n2026-02-02T22:34:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_9d7d7869-ac77-4cdc-9536-228460d3040e to /host/opt/cni/bin/\\\\n2026-02-02T22:34:33Z [verbose] multus-daemon started\\\\n2026-02-02T22:34:33Z [verbose] Readiness Indicator file check\\\\n2026-02-02T22:35:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:35:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:20Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.876678 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-console/downloads]} name:Service_openshift-console/downloads_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.213:80:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {2ead45b3-c313-4fbc-a7bc-2b3c4ffd610c}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 22:35:01.084987 6433 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:35:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4mblb_openshift-ovn-kubernetes(ae78d89e-7970-49df-8839-b1b6d7de4ec1)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:20Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.888919 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.889510 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.889981 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.890384 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.890783 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:20Z","lastTransitionTime":"2026-02-02T22:35:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.895569 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:20Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.914419 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:20Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.932151 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:20Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.947827 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:20Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.994824 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.994927 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.994952 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.995015 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:20 crc kubenswrapper[4755]: I0202 22:35:20.995035 4755 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:20Z","lastTransitionTime":"2026-02-02T22:35:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.040787 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 06:31:53.549411387 +0000 UTC Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.068224 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:35:21 crc kubenswrapper[4755]: E0202 22:35:21.068391 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.097229 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.097288 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.097307 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.097331 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.097349 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:21Z","lastTransitionTime":"2026-02-02T22:35:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.200403 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.200459 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.200481 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.200509 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.200532 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:21Z","lastTransitionTime":"2026-02-02T22:35:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.303360 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.303417 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.303434 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.303458 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.303475 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:21Z","lastTransitionTime":"2026-02-02T22:35:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.406993 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.407047 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.407068 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.407094 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.407112 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:21Z","lastTransitionTime":"2026-02-02T22:35:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.510043 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.510400 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.510543 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.510787 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.510921 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:21Z","lastTransitionTime":"2026-02-02T22:35:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.615129 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.615171 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.615186 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.615226 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.615240 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:21Z","lastTransitionTime":"2026-02-02T22:35:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.717966 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.718041 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.718099 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.718130 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.718151 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:21Z","lastTransitionTime":"2026-02-02T22:35:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.821413 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.821526 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.821596 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.821623 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.821639 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:21Z","lastTransitionTime":"2026-02-02T22:35:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.924470 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.924835 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.924983 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.925162 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:21 crc kubenswrapper[4755]: I0202 22:35:21.925292 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:21Z","lastTransitionTime":"2026-02-02T22:35:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.029062 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.029187 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.029207 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.029231 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.029248 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:22Z","lastTransitionTime":"2026-02-02T22:35:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.042363 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 18:46:51.977337712 +0000 UTC Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.068853 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.068870 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.068870 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:35:22 crc kubenswrapper[4755]: E0202 22:35:22.069240 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:35:22 crc kubenswrapper[4755]: E0202 22:35:22.069333 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:35:22 crc kubenswrapper[4755]: E0202 22:35:22.069043 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.132299 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.132659 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.132844 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.132981 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.133100 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:22Z","lastTransitionTime":"2026-02-02T22:35:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.236425 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.236489 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.236511 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.236538 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.236558 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:22Z","lastTransitionTime":"2026-02-02T22:35:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.341959 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.342026 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.342043 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.342068 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.342085 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:22Z","lastTransitionTime":"2026-02-02T22:35:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.344238 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.344295 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.344312 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.344337 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.344354 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:22Z","lastTransitionTime":"2026-02-02T22:35:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:22 crc kubenswrapper[4755]: E0202 22:35:22.367967 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:22Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.373586 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.373672 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.373716 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.373788 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.373817 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:22Z","lastTransitionTime":"2026-02-02T22:35:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:22 crc kubenswrapper[4755]: E0202 22:35:22.395690 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:22Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.401135 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.401390 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.401523 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.401643 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.401832 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:22Z","lastTransitionTime":"2026-02-02T22:35:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:22 crc kubenswrapper[4755]: E0202 22:35:22.423989 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:22Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.432580 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.432925 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.433070 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.433238 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.433395 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:22Z","lastTransitionTime":"2026-02-02T22:35:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:22 crc kubenswrapper[4755]: E0202 22:35:22.454537 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:22Z is after 2025-08-24T17:21:41Z"
Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.460932 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.460990 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.461009 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.461036 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.461054 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:22Z","lastTransitionTime":"2026-02-02T22:35:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:22 crc kubenswrapper[4755]: E0202 22:35:22.482344 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:22Z is after 2025-08-24T17:21:41Z"
Feb 02 22:35:22 crc kubenswrapper[4755]: E0202 22:35:22.482568 4755 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.485134 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasSufficientMemory" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.485190 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.485215 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.485246 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.485269 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:22Z","lastTransitionTime":"2026-02-02T22:35:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.587376 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.587436 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.587487 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.587513 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.587531 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:22Z","lastTransitionTime":"2026-02-02T22:35:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.690681 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.690771 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.690799 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.690827 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.690850 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:22Z","lastTransitionTime":"2026-02-02T22:35:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.794031 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.794104 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.794124 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.794152 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.794172 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:22Z","lastTransitionTime":"2026-02-02T22:35:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.897195 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.897253 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.897270 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.897296 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:22 crc kubenswrapper[4755]: I0202 22:35:22.897313 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:22Z","lastTransitionTime":"2026-02-02T22:35:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.000165 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.000266 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.000291 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.000320 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.000341 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:23Z","lastTransitionTime":"2026-02-02T22:35:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.042956 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 06:57:36.850583979 +0000 UTC Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.068885 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:35:23 crc kubenswrapper[4755]: E0202 22:35:23.069072 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.103215 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.103268 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.103286 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.103308 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.103325 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:23Z","lastTransitionTime":"2026-02-02T22:35:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.205593 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.205676 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.205694 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.205756 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.205775 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:23Z","lastTransitionTime":"2026-02-02T22:35:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.308427 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.308482 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.308504 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.308531 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.308552 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:23Z","lastTransitionTime":"2026-02-02T22:35:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.411207 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.411268 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.411286 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.411311 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.411327 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:23Z","lastTransitionTime":"2026-02-02T22:35:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.515115 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.515178 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.515197 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.515226 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.515243 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:23Z","lastTransitionTime":"2026-02-02T22:35:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.618139 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.618187 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.618206 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.618230 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.618247 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:23Z","lastTransitionTime":"2026-02-02T22:35:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.720301 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.720383 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.720397 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.720414 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.720425 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:23Z","lastTransitionTime":"2026-02-02T22:35:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.822704 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.822773 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.822785 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.822801 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.822812 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:23Z","lastTransitionTime":"2026-02-02T22:35:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.925577 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.926029 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.926248 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.926623 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:23 crc kubenswrapper[4755]: I0202 22:35:23.926846 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:23Z","lastTransitionTime":"2026-02-02T22:35:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.030021 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.030075 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.030087 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.030105 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.030117 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:24Z","lastTransitionTime":"2026-02-02T22:35:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.043488 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 17:09:40.968432135 +0000 UTC Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.068797 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:35:24 crc kubenswrapper[4755]: E0202 22:35:24.068953 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.069049 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.069329 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:35:24 crc kubenswrapper[4755]: E0202 22:35:24.069497 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:35:24 crc kubenswrapper[4755]: E0202 22:35:24.069819 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.084696 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.133034 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.133096 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.133121 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.133150 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.133172 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:24Z","lastTransitionTime":"2026-02-02T22:35:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.236622 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.237046 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.237307 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.237483 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.237663 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:24Z","lastTransitionTime":"2026-02-02T22:35:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.340996 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.341369 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.341562 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.341722 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.341907 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:24Z","lastTransitionTime":"2026-02-02T22:35:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.444858 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.445225 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.445416 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.445655 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.445936 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:24Z","lastTransitionTime":"2026-02-02T22:35:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.549494 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.549934 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.550082 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.550232 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.556943 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:24Z","lastTransitionTime":"2026-02-02T22:35:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.660433 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.660511 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.660536 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.660566 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.660592 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:24Z","lastTransitionTime":"2026-02-02T22:35:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.763230 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.763281 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.763298 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.763323 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.763342 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:24Z","lastTransitionTime":"2026-02-02T22:35:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.865992 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.866032 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.866041 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.866054 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.866063 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:24Z","lastTransitionTime":"2026-02-02T22:35:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.981082 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.981137 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.981151 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.981168 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:24 crc kubenswrapper[4755]: I0202 22:35:24.981181 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:24Z","lastTransitionTime":"2026-02-02T22:35:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.043926 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 00:16:31.13545695 +0000 UTC
Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.068437 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml"
Feb 02 22:35:25 crc kubenswrapper[4755]: E0202 22:35:25.068583 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923"
pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.083440 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.083500 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.083522 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.083545 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.083563 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:25Z","lastTransitionTime":"2026-02-02T22:35:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.087268 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:25Z is 
after 2025-08-24T17:21:41Z" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.103392 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:25Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.118708 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:25Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.133972 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:25Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.153484 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:25Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.168878 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0803e3-8271-4b9e-a956-d29ba8f7182f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f27cf58110e22dee25be7cc3bc69eb426ed482cc82dabe8fc748e22863444b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://53dc6e93a1cf31d4ee15672f5a77cb4492586fdd71bf9950b79d82668f1fa483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53dc6e93a1cf31d4ee15672f5a77cb4492586fdd71bf9950b79d82668f1fa483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:25Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.185346 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.185471 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.185493 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.185520 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.185539 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:25Z","lastTransitionTime":"2026-02-02T22:35:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.187262 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-po
d-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:25Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.206534 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:25Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.221608 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:25Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.242498 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:25Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.259616 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e5c9d46-6238-45de-b6ed-f633e92b3728\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7aa1ae9141eb375e7a9daa30bed33432bee7b5e963bee04bc20e8f8da94a31d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4699e555a7f6d6a8245cd7efa09afc179d7a17903477c520a4aad929ab4bc218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/e
nv\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6ljzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:25Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.287610 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.287655 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.287672 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.287693 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.287707 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:25Z","lastTransitionTime":"2026-02-02T22:35:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.294421 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-console/downloads]} name:Service_openshift-console/downloads_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.213:80:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {2ead45b3-c313-4fbc-a7bc-2b3c4ffd610c}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 22:35:01.084987 6433 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:35:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed 
container=ovnkube-controller pod=ovnkube-node-4mblb_openshift-ovn-kubernetes(ae78d89e-7970-49df-8839-b1b6d7de4ec1)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:25Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.312992 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k8tml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k8tml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:25Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.347312 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5dfdc9-93c3-442e-b9ac-1edbde8b5356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1537c30f4102532e8b02d31e92eb43916f7810a8b082d2b564c45d2c269fe8c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e11caaf9b5dd96ed1cf9718112971ce1e059fbde5ce0bd7e43c326515db990d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88e31bc33ec540bf5097cb85062e7681f619e80680c68169fd7a10c5086c3f13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:25Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.362314 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:25Z is after 
Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.372255 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:25Z is after 2025-08-24T17:21:41Z"
Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.390174 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.390222 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.390232 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.390248 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.390257 4755 setters.go:603] "Node became not ready" node="crc"
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:25Z","lastTransitionTime":"2026-02-02T22:35:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.393886 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9261f8876de2e09d0918c8008e1c4bde12d29318d43a80e47aa7d6e6cfc35b2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:35:18Z\\\",\\\"message\\\":\\\"2026-02-02T22:34:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_9d7d7869-ac77-4cdc-9536-228460d3040e\\\\n2026-02-02T22:34:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_9d7d7869-ac77-4cdc-9536-228460d3040e to /host/opt/cni/bin/\\\\n2026-02-02T22:34:33Z [verbose] multus-daemon started\\\\n2026-02-02T22:34:33Z [verbose] Readiness Indicator file check\\\\n2026-02-02T22:35:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:35:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:25Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.493227 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.493292 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.493310 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.493333 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.493352 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:25Z","lastTransitionTime":"2026-02-02T22:35:25Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.596608 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.596688 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.596709 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.596780 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.596800 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:25Z","lastTransitionTime":"2026-02-02T22:35:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.700053 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.700120 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.700140 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.700167 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.700185 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:25Z","lastTransitionTime":"2026-02-02T22:35:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.804205 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.804283 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.804304 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.804332 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.804353 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:25Z","lastTransitionTime":"2026-02-02T22:35:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.907942 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.907989 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.908003 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.908021 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:25 crc kubenswrapper[4755]: I0202 22:35:25.908034 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:25Z","lastTransitionTime":"2026-02-02T22:35:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.011427 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.011490 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.011508 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.011532 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.011550 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:26Z","lastTransitionTime":"2026-02-02T22:35:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
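Has your network provider started?"}

The KubeletNotReady condition repeated above reduces to one filesystem fact: the runtime reports NetworkReady=false until a CNI configuration file appears in /etc/kubernetes/cni/net.d/, and the crash-looping ovnkube-controller has not yet written its 10-ovn-kubernetes.conf there. A rough Go sketch of such a directory probe, assuming only what the message itself implies; this is illustrative, not CRI-O's actual implementation.

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    // networkReady reports whether confDir holds at least one CNI config
    // (.conf, .conflist, or .json) -- roughly the condition behind
    // "no CNI configuration file in /etc/kubernetes/cni/net.d/".
    func networkReady(confDir string) (bool, error) {
        entries, err := os.ReadDir(confDir)
        if err != nil {
            return false, err
        }
        for _, e := range entries {
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                return true, nil
            }
        }
        return false, nil
    }

    func main() {
        ok, err := networkReady("/etc/kubernetes/cni/net.d")
        fmt.Println(ok, err)
    }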
Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.044343 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 22:42:42.201142355 +0000 UTC
Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.068782 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.068831 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.068841 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 22:35:26 crc kubenswrapper[4755]: E0202 22:35:26.068968 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 22:35:26 crc kubenswrapper[4755]: E0202 22:35:26.069188 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 22:35:26 crc kubenswrapper[4755]: E0202 22:35:26.069278 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.114710 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.114826 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.114854 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.114887 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.114910 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:26Z","lastTransitionTime":"2026-02-02T22:35:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.218444 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.218544 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.218565 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.218590 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.218607 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:26Z","lastTransitionTime":"2026-02-02T22:35:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.321857 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.321919 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.321940 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.321964 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.321984 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:26Z","lastTransitionTime":"2026-02-02T22:35:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.424532 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.424581 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.424592 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.424608 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.424619 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:26Z","lastTransitionTime":"2026-02-02T22:35:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.527295 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.527361 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.527382 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.527405 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.527423 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:26Z","lastTransitionTime":"2026-02-02T22:35:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.630790 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.630844 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.630867 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.630897 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.630919 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:26Z","lastTransitionTime":"2026-02-02T22:35:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.734317 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.734374 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.734393 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.734420 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.734439 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:26Z","lastTransitionTime":"2026-02-02T22:35:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.837971 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.838075 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.838101 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.838130 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.838152 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:26Z","lastTransitionTime":"2026-02-02T22:35:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.940890 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.940942 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.940956 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.940974 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:26 crc kubenswrapper[4755]: I0202 22:35:26.940989 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:26Z","lastTransitionTime":"2026-02-02T22:35:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.043871 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.043923 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.043935 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.043953 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.043969 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:27Z","lastTransitionTime":"2026-02-02T22:35:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
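Has your network provider started?"}

ovnkube-controller is sitting in CrashLoopBackOff ("back-off 20s restarting failed container", restartCount 2), and the RemoveContainer and ContainerStarted lines that follow are kubelet retrying once that back-off expires. The schedule is a simple doubling back-off; a small sketch, with the 10s base and 5m cap taken from upstream kubelet defaults rather than read from this log:

    package main

    import (
        "fmt"
        "time"
    )

    // restartBackoff yields the doubling sequence 10s, 20s, 40s, ...
    // capped at 5m; the 20s in the message above is the second step.
    func restartBackoff(failures int) time.Duration {
        const (
            base    = 10 * time.Second
            maxWait = 5 * time.Minute
        )
        d := base
        for i := 1; i < failures; i++ {
            d *= 2
            if d >= maxWait {
                return maxWait
            }
        }
        return d
    }

    func main() {
        for n := 1; n <= 7; n++ {
            fmt.Printf("failure %d: wait %s\n", n, restartBackoff(n))
        }
    }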
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.044546 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 15:31:27.162194746 +0000 UTC
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.068260 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml"
Feb 02 22:35:27 crc kubenswrapper[4755]: E0202 22:35:27.068473 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923"
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.069230 4755 scope.go:117] "RemoveContainer" containerID="f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b"
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.147176 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.147581 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.147610 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.147640 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.147664 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:27Z","lastTransitionTime":"2026-02-02T22:35:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.250667 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.250780 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.250808 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.250835 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.250857 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:27Z","lastTransitionTime":"2026-02-02T22:35:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.353171 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.353217 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.353230 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.353247 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.353259 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:27Z","lastTransitionTime":"2026-02-02T22:35:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.455431 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.455474 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.455486 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.455504 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.455517 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:27Z","lastTransitionTime":"2026-02-02T22:35:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.558966 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.559041 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.559066 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.559115 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.559157 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:27Z","lastTransitionTime":"2026-02-02T22:35:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
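Has your network provider started?"}

Note the two certificate_manager.go lines above: the same kubelet-serving expiration (2026-02-24 05:53:03 UTC) but two different rotation deadlines, printed about a second apart. That is expected: the client-go certificate manager re-draws the deadline as a jittered point late in the certificate's lifetime on every evaluation. A toy sketch of that computation; the one-year lifetime and the exact 70-90% jitter window are assumptions, not values read from this log or from the actual client-go code.

    package main

    import (
        "fmt"
        "math/rand"
        "time"
    )

    // rotationDeadline picks a uniformly random point in [70%, 90%] of
    // the certificate's lifetime, so successive calls differ -- matching
    // the two distinct deadlines logged above.
    func rotationDeadline(notBefore, notAfter time.Time, r *rand.Rand) time.Time {
        lifetime := notAfter.Sub(notBefore)
        frac := 0.7 + 0.2*r.Float64()
        return notBefore.Add(time.Duration(float64(lifetime) * frac))
    }

    func main() {
        r := rand.New(rand.NewSource(time.Now().UnixNano()))
        notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC)
        notBefore := notAfter.AddDate(-1, 0, 0) // assumed one-year lifetime
        for i := 0; i < 2; i++ {
            fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter, r))
        }
    }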
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.641262 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4mblb_ae78d89e-7970-49df-8839-b1b6d7de4ec1/ovnkube-controller/2.log"
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.645824 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerStarted","Data":"23a5422c202fbce56b8c0b11d877cadff84502f56d66f5ce94b79164ff61417f"}
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.646957 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb"
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.661900 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.661966 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.661985 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.662013 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.662037 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:27Z","lastTransitionTime":"2026-02-02T22:35:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.676623 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e5c9d46-6238-45de-b6ed-f633e92b3728\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7aa1ae9141eb375e7a9daa30bed33432bee7b5e963bee04bc20e8f8da94a31d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4699e555a7f6d6a8245cd7efa09afc179d7a17903477c520a4aad929ab4bc218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6ljzc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.701169 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5dfdc9-93c3-442e-b9ac-1edbde8b5356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1537c30f4102532e8b02d31e92eb43916f7810a8b082d2b564c45d2c269fe8c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e11caaf9b5dd96ed1cf9718112971ce1e059fbde5ce0bd7e43c326515db990d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88e31bc33ec540bf5097cb85062e7681f619e80680c68169fd7a10c5086c3f13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.721554 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.741180 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.761255 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9261f8876de2e09d0918c8008e1c4bde12d29318d43a80e47aa7d6e6cfc35b2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:35:18Z\\\",\\\"message\\\":\\\"2026-02-02T22:34:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_9d7d7869-ac77-4cdc-9536-228460d3040e\\\\n2026-02-02T22:34:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_9d7d7869-ac77-4cdc-9536-228460d3040e to /host/opt/cni/bin/\\\\n2026-02-02T22:34:33Z [verbose] multus-daemon started\\\\n2026-02-02T22:34:33Z [verbose] Readiness Indicator file check\\\\n2026-02-02T22:35:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:35:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:27Z is after 2025-08-24T17:21:41Z"
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.765697 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.765751 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.765763 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.765780 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.765793 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:27Z","lastTransitionTime":"2026-02-02T22:35:27Z","reason":"KubeletNotReady","message":"container
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.781571 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a5422c202fbce56b8c0b11d877cadff84502f56d66f5ce94b79164ff61417f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-console/downloads]} name:Service_openshift-console/downloads_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.213:80:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {2ead45b3-c313-4fbc-a7bc-2b3c4ffd610c}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 22:35:01.084987 6433 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-02-0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:35:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{
\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.793415 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k8tml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k8tml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.806751 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.819276 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.830661 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.839503 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.850461 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0803e3-8271-4b9e-a956-d29ba8f7182f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f27cf58110e22dee25be7cc3bc69eb426ed482cc82dabe8fc748e22863444b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://53dc6e93a1cf31d4ee15672f5a77cb4492586fdd71bf9950b79d82668f1fa483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53dc6e93a1cf31d4ee15672f5a77cb4492586fdd71bf9950b79d82668f1fa483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.866375 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.868325 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.868368 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.868382 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.868406 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.868420 4755 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:27Z","lastTransitionTime":"2026-02-02T22:35:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.881721 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.896538 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.911714 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.928751 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.1
26.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:27Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.970700 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.970752 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.970764 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.970782 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:27 crc kubenswrapper[4755]: I0202 22:35:27.970794 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:27Z","lastTransitionTime":"2026-02-02T22:35:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.045546 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 12:51:14.095805737 +0000 UTC Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.067807 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.067852 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.067889 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:35:28 crc kubenswrapper[4755]: E0202 22:35:28.067968 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:35:28 crc kubenswrapper[4755]: E0202 22:35:28.068054 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:35:28 crc kubenswrapper[4755]: E0202 22:35:28.068250 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.074044 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.074111 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.074136 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.074167 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.074189 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:28Z","lastTransitionTime":"2026-02-02T22:35:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.177587 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.177653 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.177673 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.177700 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.177719 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:28Z","lastTransitionTime":"2026-02-02T22:35:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.280535 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.280615 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.280634 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.280660 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.280678 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:28Z","lastTransitionTime":"2026-02-02T22:35:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.384140 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.384195 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.384208 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.384227 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.384239 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:28Z","lastTransitionTime":"2026-02-02T22:35:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.487078 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.487151 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.487177 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.487207 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.487228 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:28Z","lastTransitionTime":"2026-02-02T22:35:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.590367 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.590439 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.590457 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.590482 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.590499 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:28Z","lastTransitionTime":"2026-02-02T22:35:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.651995 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4mblb_ae78d89e-7970-49df-8839-b1b6d7de4ec1/ovnkube-controller/3.log" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.653115 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4mblb_ae78d89e-7970-49df-8839-b1b6d7de4ec1/ovnkube-controller/2.log" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.657587 4755 generic.go:334] "Generic (PLEG): container finished" podID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerID="23a5422c202fbce56b8c0b11d877cadff84502f56d66f5ce94b79164ff61417f" exitCode=1 Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.657643 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerDied","Data":"23a5422c202fbce56b8c0b11d877cadff84502f56d66f5ce94b79164ff61417f"} Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.657714 4755 scope.go:117] "RemoveContainer" containerID="f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.658770 4755 scope.go:117] "RemoveContainer" containerID="23a5422c202fbce56b8c0b11d877cadff84502f56d66f5ce94b79164ff61417f" Feb 02 22:35:28 crc kubenswrapper[4755]: E0202 22:35:28.659078 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4mblb_openshift-ovn-kubernetes(ae78d89e-7970-49df-8839-b1b6d7de4ec1)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.678020 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:28Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.692586 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.692651 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.692669 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.692693 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.692710 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:28Z","lastTransitionTime":"2026-02-02T22:35:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.699146 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:28Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.717476 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:28Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.733491 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:28Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.749346 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0803e3-8271-4b9e-a956-d29ba8f7182f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f27cf58110e22dee25be7cc3bc69eb426ed482cc82dabe8fc748e22863444b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://53dc6e93a1cf31d4ee15672f5a77cb4492586fdd71bf9950b79d82668f1fa483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53dc6e93a1cf31d4ee15672f5a77cb4492586fdd71bf9950b79d82668f1fa483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:28Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.772928 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:28Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.791274 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:28Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.795545 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.795612 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.795633 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.795659 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.795677 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:28Z","lastTransitionTime":"2026-02-02T22:35:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.806101 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:28Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.823101 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:28Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.848021 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:28Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.865390 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e5c9d46-6238-45de-b6ed-f633e92b3728\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7aa1ae9141eb375e7a9daa30bed33432bee7b5e963bee04bc20e8f8da94a31d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4699e555a7f6d6a8245cd7efa09afc179d7a17903477c520a4aad929ab4bc218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6ljzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:28Z is after 2025-08-24T17:21:41Z" Feb 02 
22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.881708 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k8tml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k8tml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:28Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.898837 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.898909 4755 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.898928 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.898954 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.898972 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:28Z","lastTransitionTime":"2026-02-02T22:35:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.900348 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5dfdc9-93c3-442e-b9ac-1edbde8b5356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1537c30f4102532e8b02d31e92eb43916f7810a8b082d2b564c45d2c269fe8c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e11caaf9b5dd96ed1cf9718112971ce1e059fbde5ce0bd7e43c326515db990d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-
pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88e31bc33ec540bf5097cb85062e7681f619e80680c68169fd7a10c5086c3f13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:28Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.917086 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:28Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.936627 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:28Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.957346 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9261f8876de2e09d0918c8008e1c4bde12d29318d43a80e47aa7d6e6cfc35b2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:35:18Z\\\",\\\"message\\\":\\\"2026-02-02T22:34:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_9d7d7869-ac77-4cdc-9536-228460d3040e\\\\n2026-02-02T22:34:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_9d7d7869-ac77-4cdc-9536-228460d3040e to /host/opt/cni/bin/\\\\n2026-02-02T22:34:33Z [verbose] multus-daemon started\\\\n2026-02-02T22:34:33Z [verbose] Readiness Indicator file check\\\\n2026-02-02T22:35:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:35:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:28Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:28 crc kubenswrapper[4755]: I0202 22:35:28.990552 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a5422c202fbce56b8c0b11d877cadff84502f56d66f5ce94b79164ff61417f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f328e32897ff26f0f2d962c617ef49fe3c2462c9f1aea3d02508aaabbcb1a75b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:35:01Z\\\",\\\"message\\\":\\\"Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-console/downloads]} name:Service_openshift-console/downloads_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.213:80:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {2ead45b3-c313-4fbc-a7bc-2b3c4ffd610c}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 22:35:01.084987 6433 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:35:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a5422c202fbce56b8c0b11d877cadff84502f56d66f5ce94b79164ff61417f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:35:28Z\\\",\\\"message\\\":\\\"emoved *v1.NetworkPolicy event handler 4\\\\nI0202 22:35:28.154907 6857 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 22:35:28.154959 6857 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0202 22:35:28.154977 6857 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 22:35:28.154987 6857 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 22:35:28.154988 6857 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 22:35:28.155009 6857 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 22:35:28.155011 6857 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 22:35:28.155029 6857 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 22:35:28.155043 6857 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 22:35:28.155057 6857 factory.go:656] 
Stopping watch factory\\\\nI0202 22:35:28.155077 6857 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 22:35:28.155385 6857 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0202 22:35:28.155499 6857 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0202 22:35:28.155545 6857 ovnkube.go:599] Stopped ovnkube\\\\nI0202 22:35:28.155577 6857 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 22:35:28.155694 6857 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:35:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-o
verrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:28Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.001181 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.001256 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.001289 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.001318 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.001340 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:29Z","lastTransitionTime":"2026-02-02T22:35:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.046139 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 13:57:03.93949296 +0000 UTC Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.068690 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:35:29 crc kubenswrapper[4755]: E0202 22:35:29.068967 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.499749 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.499789 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.499799 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.499812 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.499823 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:29Z","lastTransitionTime":"2026-02-02T22:35:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.602899 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.602955 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.602973 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.602997 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.603014 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:29Z","lastTransitionTime":"2026-02-02T22:35:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.663942 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4mblb_ae78d89e-7970-49df-8839-b1b6d7de4ec1/ovnkube-controller/3.log" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.669310 4755 scope.go:117] "RemoveContainer" containerID="23a5422c202fbce56b8c0b11d877cadff84502f56d66f5ce94b79164ff61417f" Feb 02 22:35:29 crc kubenswrapper[4755]: E0202 22:35:29.669601 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4mblb_openshift-ovn-kubernetes(ae78d89e-7970-49df-8839-b1b6d7de4ec1)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.694972 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"term
inated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0
dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:29Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.706338 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.706400 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.706419 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.706443 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.706464 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:29Z","lastTransitionTime":"2026-02-02T22:35:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.709864 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0803e3-8271-4b9e-a956-d29ba8f7182f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f27cf58110e22dee25be7cc3bc69eb426ed482cc82dabe8fc748e22863444b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://53dc6e93a1cf31d4ee15672f5a77cb4492586fdd71bf9950b79d82668f1fa483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53dc6e93a1cf31d4ee15672f5a77cb4492586fdd71bf9950b79d82668f1fa483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:29Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.727986 4755 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7bb34205
09a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:29Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.748662 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:29Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.769974 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:29Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.791555 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:29Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.809114 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.809181 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.809200 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.809224 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.809242 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:29Z","lastTransitionTime":"2026-02-02T22:35:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.810690 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e5c9d46-6238-45de-b6ed-f633e92b3728\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7aa1ae9141eb375e7a9daa30bed33432bee7b5e963bee04bc20e8f8da94a31d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4699e555a7f6d6a8245cd7efa09afc179d7a17903477c520a4aad929ab4bc218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6ljzc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:29Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.848761 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a
170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a5422c202fbce56b8c0b11d877cadff84502f56d66f5ce94b79164ff61417f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a5422c202fbce56b8c0b11d877cadff84502f56d66f5ce94b79164ff61417f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:35:28Z\\\",\\\"message\\\":\\\"emoved *v1.NetworkPolicy event handler 4\\\\nI0202 22:35:28.154907 6857 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 22:35:28.154959 6857 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0202 22:35:28.154977 6857 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 22:35:28.154987 6857 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 22:35:28.154988 6857 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 22:35:28.155009 6857 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 22:35:28.155011 6857 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 22:35:28.155029 6857 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 22:35:28.155043 6857 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 22:35:28.155057 6857 factory.go:656] Stopping watch factory\\\\nI0202 22:35:28.155077 6857 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 22:35:28.155385 6857 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0202 22:35:28.155499 6857 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0202 22:35:28.155545 6857 ovnkube.go:599] Stopped ovnkube\\\\nI0202 22:35:28.155577 6857 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 22:35:28.155694 6857 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:35:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4mblb_openshift-ovn-kubernetes(ae78d89e-7970-49df-8839-b1b6d7de4ec1)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:29Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.867122 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k8tml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k8tml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:29Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.884956 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5dfdc9-93c3-442e-b9ac-1edbde8b5356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1537c30f4102532e8b02d31e92eb43916f7810a8b082d2b564c45d2c269fe8c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e11caaf9b5dd96ed1cf9718112971ce1e059fbde5ce0bd7e43c326515db990d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88e31bc33ec540bf5097cb85062e7681f619e80680c68169fd7a10c5086c3f13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:29Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.906045 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:29Z is after 
2025-08-24T17:21:41Z" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.911712 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.911788 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.911804 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.911825 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.911841 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:29Z","lastTransitionTime":"2026-02-02T22:35:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.926376 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:29Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.950233 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9261f8876de2e09d0918c8008e1c4bde12d29318d43a80e47aa7d6e6cfc35b2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:35:18Z\\\",\\\"message\\\":\\\"2026-02-02T22:34:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_9d7d7869-ac77-4cdc-9536-228460d3040e\\\\n2026-02-02T22:34:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_9d7d7869-ac77-4cdc-9536-228460d3040e to /host/opt/cni/bin/\\\\n2026-02-02T22:34:33Z [verbose] multus-daemon started\\\\n2026-02-02T22:34:33Z [verbose] Readiness Indicator file check\\\\n2026-02-02T22:35:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:35:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:29Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:29 crc kubenswrapper[4755]: I0202 22:35:29.969672 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:29Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.016563 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.016626 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.016644 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.016672 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.016691 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:30Z","lastTransitionTime":"2026-02-02T22:35:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.018076 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:30Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.027449 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:35:30 crc kubenswrapper[4755]: E0202 22:35:30.027594 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:34.027575214 +0000 UTC m=+149.718795540 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.027640 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.027741 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.027772 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.027796 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 22:35:30 crc kubenswrapper[4755]: E0202 22:35:30.027914 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Feb 02 22:35:30 crc kubenswrapper[4755]: E0202 22:35:30.027911 4755 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Feb 02 22:35:30 crc kubenswrapper[4755]: E0202 22:35:30.028003 4755 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Feb 02 22:35:30 crc kubenswrapper[4755]: E0202 22:35:30.028018 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 22:36:34.027994616 +0000 UTC m=+149.719214972 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Feb 02 22:35:30 crc kubenswrapper[4755]: E0202 22:35:30.028161 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 22:36:34.02813081 +0000 UTC m=+149.719351166 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Feb 02 22:35:30 crc kubenswrapper[4755]: E0202 22:35:30.028010 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Feb 02 22:35:30 crc kubenswrapper[4755]: E0202 22:35:30.028204 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Feb 02 22:35:30 crc kubenswrapper[4755]: E0202 22:35:30.028301 4755 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 22:35:30 crc kubenswrapper[4755]: E0202 22:35:30.027927 4755 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Feb 02 22:35:30 crc kubenswrapper[4755]: E0202 22:35:30.028422 4755 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 22:35:30 crc kubenswrapper[4755]: E0202 22:35:30.028532 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 22:36:34.028502651 +0000 UTC m=+149.719723017 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 22:35:30 crc kubenswrapper[4755]: E0202 22:35:30.028572 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 22:36:34.028558182 +0000 UTC m=+149.719778538 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.036984 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e
95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:30Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.046893 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 11:17:38.434850729 +0000 UTC Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.053065 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\
\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:30Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.068618 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.068678 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:35:30 crc kubenswrapper[4755]: E0202 22:35:30.068813 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.068834 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:35:30 crc kubenswrapper[4755]: E0202 22:35:30.069002 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:35:30 crc kubenswrapper[4755]: E0202 22:35:30.069140 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.120093 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.120145 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.120165 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.120202 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.120221 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:30Z","lastTransitionTime":"2026-02-02T22:35:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.223812 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.223906 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.223936 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.223970 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.223996 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:30Z","lastTransitionTime":"2026-02-02T22:35:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.327234 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.327300 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.327319 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.327344 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.327363 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:30Z","lastTransitionTime":"2026-02-02T22:35:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.431267 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.431331 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.431354 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.431379 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.431397 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:30Z","lastTransitionTime":"2026-02-02T22:35:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.534709 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.534829 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.534848 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.534873 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.534892 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:30Z","lastTransitionTime":"2026-02-02T22:35:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.640161 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.640234 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.640243 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.640259 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.640269 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:30Z","lastTransitionTime":"2026-02-02T22:35:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.743043 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.743120 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.743147 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.743183 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.743206 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:30Z","lastTransitionTime":"2026-02-02T22:35:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.846167 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.846249 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.846267 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.846291 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.846319 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:30Z","lastTransitionTime":"2026-02-02T22:35:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.949038 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.949102 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.949120 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.949143 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:30 crc kubenswrapper[4755]: I0202 22:35:30.949159 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:30Z","lastTransitionTime":"2026-02-02T22:35:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.048047 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 14:38:31.198928401 +0000 UTC Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.052325 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.052377 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.052401 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.052428 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.052451 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:31Z","lastTransitionTime":"2026-02-02T22:35:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.068035 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:35:31 crc kubenswrapper[4755]: E0202 22:35:31.068204 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.156084 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.156149 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.156169 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.156194 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.156213 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:31Z","lastTransitionTime":"2026-02-02T22:35:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.259302 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.259353 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.259370 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.259398 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.259414 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:31Z","lastTransitionTime":"2026-02-02T22:35:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.362586 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.362922 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.363065 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.363247 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.363372 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:31Z","lastTransitionTime":"2026-02-02T22:35:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.466356 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.466404 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.466421 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.466443 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.466460 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:31Z","lastTransitionTime":"2026-02-02T22:35:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.569749 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.569795 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.569807 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.569841 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.569853 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:31Z","lastTransitionTime":"2026-02-02T22:35:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.673003 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.673079 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.673103 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.673131 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.673154 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:31Z","lastTransitionTime":"2026-02-02T22:35:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.776256 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.776616 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.776802 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.776960 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.777141 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:31Z","lastTransitionTime":"2026-02-02T22:35:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.879930 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.879976 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.879989 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.880008 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.880021 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:31Z","lastTransitionTime":"2026-02-02T22:35:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.982701 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.982763 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.982777 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.982794 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:31 crc kubenswrapper[4755]: I0202 22:35:31.982807 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:31Z","lastTransitionTime":"2026-02-02T22:35:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.049278 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 21:15:57.59283427 +0000 UTC Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.067945 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.067971 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:35:32 crc kubenswrapper[4755]: E0202 22:35:32.068072 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.068115 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:35:32 crc kubenswrapper[4755]: E0202 22:35:32.068309 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:35:32 crc kubenswrapper[4755]: E0202 22:35:32.068352 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.085836 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.085916 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.085943 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.085987 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.086008 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:32Z","lastTransitionTime":"2026-02-02T22:35:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.188752 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.188810 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.188830 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.188853 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.188870 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:32Z","lastTransitionTime":"2026-02-02T22:35:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.292225 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.292274 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.292295 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.292320 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.292337 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:32Z","lastTransitionTime":"2026-02-02T22:35:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.395201 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.395250 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.395269 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.395292 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.395329 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:32Z","lastTransitionTime":"2026-02-02T22:35:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.498124 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.498192 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.498216 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.498243 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.498265 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:32Z","lastTransitionTime":"2026-02-02T22:35:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.601923 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.601997 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.602020 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.602049 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.602069 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:32Z","lastTransitionTime":"2026-02-02T22:35:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.665434 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.665493 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.665509 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.665530 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.665547 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:32Z","lastTransitionTime":"2026-02-02T22:35:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:32 crc kubenswrapper[4755]: E0202 22:35:32.686091 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.691062 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.691098 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.691130 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.691149 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.691160 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:32Z","lastTransitionTime":"2026-02-02T22:35:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:32 crc kubenswrapper[4755]: E0202 22:35:32.712026 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.717299 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.717361 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.717380 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.717405 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.717424 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:32Z","lastTransitionTime":"2026-02-02T22:35:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:32 crc kubenswrapper[4755]: E0202 22:35:32.740322 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.745356 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.745398 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.745411 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.745450 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.745465 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:32Z","lastTransitionTime":"2026-02-02T22:35:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:32 crc kubenswrapper[4755]: E0202 22:35:32.766083 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.771404 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.771477 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.771491 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.771536 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.771554 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:32Z","lastTransitionTime":"2026-02-02T22:35:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:32 crc kubenswrapper[4755]: E0202 22:35:32.791147 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:32Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:32 crc kubenswrapper[4755]: E0202 22:35:32.791328 4755 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.793445 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.793470 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.793503 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.793518 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.793529 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:32Z","lastTransitionTime":"2026-02-02T22:35:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.897103 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.897158 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.897176 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.897199 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:32 crc kubenswrapper[4755]: I0202 22:35:32.897216 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:32Z","lastTransitionTime":"2026-02-02T22:35:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.001275 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.001344 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.001363 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.001391 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.001409 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:33Z","lastTransitionTime":"2026-02-02T22:35:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.049866 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 00:32:20.358754592 +0000 UTC Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.068839 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:35:33 crc kubenswrapper[4755]: E0202 22:35:33.069038 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.104266 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.104328 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.104348 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.104373 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.104391 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:33Z","lastTransitionTime":"2026-02-02T22:35:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.207394 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.207460 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.207477 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.207501 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.207519 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:33Z","lastTransitionTime":"2026-02-02T22:35:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.310931 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.311000 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.311018 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.311044 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.311061 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:33Z","lastTransitionTime":"2026-02-02T22:35:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.413794 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.413867 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.413894 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.413925 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.413949 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:33Z","lastTransitionTime":"2026-02-02T22:35:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.517521 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.517602 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.517622 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.517654 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.517679 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:33Z","lastTransitionTime":"2026-02-02T22:35:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.621211 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.621276 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.621301 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.621330 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.621353 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:33Z","lastTransitionTime":"2026-02-02T22:35:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.725024 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.725079 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.725091 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.725108 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.725121 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:33Z","lastTransitionTime":"2026-02-02T22:35:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.828642 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.828705 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.828722 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.828780 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.828800 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:33Z","lastTransitionTime":"2026-02-02T22:35:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.932228 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.932291 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.932312 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.932337 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:33 crc kubenswrapper[4755]: I0202 22:35:33.932355 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:33Z","lastTransitionTime":"2026-02-02T22:35:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.035554 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.035652 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.035670 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.035694 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.035715 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:34Z","lastTransitionTime":"2026-02-02T22:35:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.050194 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 13:36:59.894942961 +0000 UTC Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.068818 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.068925 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.068818 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:35:34 crc kubenswrapper[4755]: E0202 22:35:34.069035 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:35:34 crc kubenswrapper[4755]: E0202 22:35:34.069281 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:35:34 crc kubenswrapper[4755]: E0202 22:35:34.069424 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.138841 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.138964 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.138985 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.139007 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.139024 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:34Z","lastTransitionTime":"2026-02-02T22:35:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.242717 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.242805 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.242824 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.242848 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.242873 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:34Z","lastTransitionTime":"2026-02-02T22:35:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.346597 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.346681 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.346707 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.346789 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.346817 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:34Z","lastTransitionTime":"2026-02-02T22:35:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.450300 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.450371 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.450400 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.450431 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.450453 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:34Z","lastTransitionTime":"2026-02-02T22:35:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.553640 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.553714 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.553767 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.553797 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.553823 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:34Z","lastTransitionTime":"2026-02-02T22:35:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.657044 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.657091 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.657108 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.657130 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.657146 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:34Z","lastTransitionTime":"2026-02-02T22:35:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.760138 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.760204 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.760223 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.760246 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.760263 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:34Z","lastTransitionTime":"2026-02-02T22:35:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.862802 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.862867 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.862884 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.862907 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.862925 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:34Z","lastTransitionTime":"2026-02-02T22:35:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.965999 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.966142 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.966169 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.966199 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:34 crc kubenswrapper[4755]: I0202 22:35:34.966222 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:34Z","lastTransitionTime":"2026-02-02T22:35:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.051305 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 22:12:22.170825219 +0000 UTC
Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.067911 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml"
Feb 02 22:35:35 crc kubenswrapper[4755]: E0202 22:35:35.068157 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923"
Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.069367 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.069508 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.069529 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.069553 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.069590 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:35Z","lastTransitionTime":"2026-02-02T22:35:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.086931 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4j9p7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11c7fccd-43ce-4bdf-9c2a-303a76526672\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03b82e92315ac0e09086b3bc21365863a86733313c73dc9d3be5734fd8d2e16a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zd54h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:35Z is after 2025-08-24T17:21:41Z"
{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4j9p7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.106351 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ca3b55ffddd2e5edb70ebbd2e42e3ddd499ac7dd15ff9ed46edf92a8e4aa78c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.125510 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.142592 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc686b0f-8473-46b8-9d5e-abcddcca635f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bf2359c43b0a4aa5dbde2e8b9cc216cf9dc95e30519ded850302bffebaa09f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f4k9p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-8q4mc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.161522 4755 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.175578 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.175724 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.175869 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.175897 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.175916 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:35Z","lastTransitionTime":"2026-02-02T22:35:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.190841 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1669654339b17d221be9aeb58cbb2df9e72dced1d2a93913ac7e5fda56abd74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://937be8d8e3349ee3ab1c5fd9e66212e0e0947987abd576c2e7693dc660822f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:35Z is after 2025-08-24T17:21:41Z"
Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.219596 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9c0bf0a-73a9-42db-8ae6-716f712c0701\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d62d1fb4c98d8befddb7a11dab78ddfc07c92856c31f986f7e68dc413551923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40d80d4e9c94e073422a1cd233c6800224f4a0d35ba9c369915d2e6b1b7a5caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a2d7574eeb038c010e5684fee6f6cb622f177a19b9163ba3b94283ccdaec6dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ca4e7249648b1461d090960cea0661ddad05ecbd7c6fe4df46ea195444fb8dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d0dc6f11c00baeb9ba1dd065a54a9a526bc6f7f5d25f2e7a818d8c3247b367c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ac77a8abd95accd9d8305c0be900beba7a987eee9160dad73c59e9598e2542d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87c7874be9e88c9cfd0daac925e1597ce1b8a23ff32ba4b8b93f26adb53a7d7d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-twttk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgbrf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:35Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb0803e3-8271-4b9e-a956-d29ba8f7182f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f27cf58110e22dee25be7cc3bc69eb426ed482cc82dabe8fc748e22863444b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://53dc6e93a1cf31d4ee15672f5a77cb4492586fdd71bf9950b79d82668f1fa483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53dc6e93a1cf31d4ee15672f5a77cb4492586fdd71bf9950b79d82668f1fa483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.253324 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eea10ee7-ddaf-4f3c-86b1-82410e04081a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"file observer\\\\nW0202 22:34:24.947513 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 22:34:24.947778 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 22:34:24.948893 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-436831588/tls.crt::/tmp/serving-cert-436831588/tls.key\\\\\\\"\\\\nI0202 22:34:25.208811 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 22:34:25.212782 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 22:34:25.212814 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 22:34:25.212851 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 22:34:25.212861 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 22:34:25.220697 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 22:34:25.220748 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 22:34:25.220765 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220778 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 22:34:25.220793 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 22:34:25.220801 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 22:34:25.220811 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 22:34:25.220819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 22:34:25.223443 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:08Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.271023 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.279109 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.279305 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.279398 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.279484 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.279559 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:35Z","lastTransitionTime":"2026-02-02T22:35:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.285404 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e5c9d46-6238-45de-b6ed-f633e92b3728\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7aa1ae9141eb375e7a9daa30bed33432bee7b5e963bee04bc20e8f8da94a31d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4699e555a7f6d6a8245cd7efa09afc179d7a17903477c520a4aad929ab4bc218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxztl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6ljzc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:35Z is after 2025-08-24T17:21:41Z"
Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.299612 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-rdh9w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"96c13e34-9c0d-4838-8353-f4ebd83ecf06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37cce79207a7064ef5848237e5b5dda1748185f7433c8bb950b49a790284a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqnlf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-rdh9w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:35Z is after 2025-08-24T17:21:41Z"
Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.316461 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5fdlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c206b6fd-200d-47ea-88a5-453f3093c749\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9261f8876de2e09d0918c8008e1c4bde12d29318d43a80e47aa7d6e6cfc35b2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:35:18Z\\\",\\\"message\\\":\\\"2026-02-02T22:34:33+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_9d7d7869-ac77-4cdc-9536-228460d3040e\\\\n2026-02-02T22:34:33+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_9d7d7869-ac77-4cdc-9536-228460d3040e to /host/opt/cni/bin/\\\\n2026-02-02T22:34:33Z [verbose] multus-daemon started\\\\n2026-02-02T22:34:33Z [verbose] Readiness Indicator file check\\\\n2026-02-02T22:35:18Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:35:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cmmxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5fdlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:35Z is after 2025-08-24T17:21:41Z"
Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.345897 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23a5422c202fbce56b8c0b11d877cadff84502f56d66f5ce94b79164ff61417f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23a5422c202fbce56b8c0b11d877cadff84502f56d66f5ce94b79164ff61417f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T22:35:28Z\\\",\\\"message\\\":\\\"emoved *v1.NetworkPolicy event handler 4\\\\nI0202 22:35:28.154907 6857 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 22:35:28.154959 6857 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0202 22:35:28.154977 6857 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 22:35:28.154987 6857 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 22:35:28.154988 6857 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 22:35:28.155009 6857 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 22:35:28.155011 6857 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 22:35:28.155029 6857 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 22:35:28.155043 6857 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 22:35:28.155057 6857 factory.go:656] Stopping watch factory\\\\nI0202 22:35:28.155077 6857 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 22:35:28.155385 6857 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0202 22:35:28.155499 6857 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0202 22:35:28.155545 6857 ovnkube.go:599] Stopped ovnkube\\\\nI0202 22:35:28.155577 6857 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 22:35:28.155694 6857 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T22:35:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4mblb_openshift-ovn-kubernetes(ae78d89e-7970-49df-8839-b1b6d7de4ec1)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rw4t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4mblb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.357679 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-k8tml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c55pv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-k8tml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.371100 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5dfdc9-93c3-442e-b9ac-1edbde8b5356\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1537c30f4102532e8b02d31e92eb43916f7810a8b082d2b564c45d2c269fe8c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e11caaf9b5dd96ed1cf9718112971ce1e059fbde5ce0bd7e43c326515db990d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88e31bc33ec540bf5097cb85062e7681f619e80680c68169fd7a10c5086c3f13\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://423904baf4d890487e5dd36dc9a0fc7de809645030c9f108634e1a177118b7ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T22:34:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T22:34:06Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T22:34:05Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.381918 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.381949 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.381961 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.381977 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.381988 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:35Z","lastTransitionTime":"2026-02-02T22:35:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.386416 4755 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T22:34:27Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82cbe24a1fe3a7d1eedbed8cf6555aef652591bfb756aad721f1990941172726\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T22:34:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:35Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.484255 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.484306 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.484329 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.484357 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.484377 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:35Z","lastTransitionTime":"2026-02-02T22:35:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.586908 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.586955 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.586972 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.586994 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.587014 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:35Z","lastTransitionTime":"2026-02-02T22:35:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.691250 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.691603 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.691621 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.691645 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.691661 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:35Z","lastTransitionTime":"2026-02-02T22:35:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.794292 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.794348 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.794366 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.794391 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.794409 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:35Z","lastTransitionTime":"2026-02-02T22:35:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.897169 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.897223 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.897245 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.897277 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:35 crc kubenswrapper[4755]: I0202 22:35:35.897294 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:35Z","lastTransitionTime":"2026-02-02T22:35:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.001190 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.001294 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.001354 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.001381 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.001438 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:36Z","lastTransitionTime":"2026-02-02T22:35:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.051922 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 12:03:53.672513034 +0000 UTC Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.068318 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.068340 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.068391 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:35:36 crc kubenswrapper[4755]: E0202 22:35:36.068481 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:35:36 crc kubenswrapper[4755]: E0202 22:35:36.068598 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:35:36 crc kubenswrapper[4755]: E0202 22:35:36.068833 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.104473 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.104531 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.104553 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.104577 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.104595 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:36Z","lastTransitionTime":"2026-02-02T22:35:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.207233 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.207290 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.207307 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.207330 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.207347 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:36Z","lastTransitionTime":"2026-02-02T22:35:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.310195 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.310270 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.310293 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.310323 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.310345 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:36Z","lastTransitionTime":"2026-02-02T22:35:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.412985 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.413048 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.413066 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.413089 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.413136 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:36Z","lastTransitionTime":"2026-02-02T22:35:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.516267 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.516354 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.516379 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.516410 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.516433 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:36Z","lastTransitionTime":"2026-02-02T22:35:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.619042 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.619118 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.619137 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.619162 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.619182 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:36Z","lastTransitionTime":"2026-02-02T22:35:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.726873 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.726965 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.726986 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.727011 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.727035 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:36Z","lastTransitionTime":"2026-02-02T22:35:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.830417 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.830473 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.830490 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.830513 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.830529 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:36Z","lastTransitionTime":"2026-02-02T22:35:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.933368 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.933527 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.933553 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.933583 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:36 crc kubenswrapper[4755]: I0202 22:35:36.933603 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:36Z","lastTransitionTime":"2026-02-02T22:35:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.036546 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.036612 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.036634 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.036657 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.036673 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:37Z","lastTransitionTime":"2026-02-02T22:35:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.052832 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 05:18:45.172592052 +0000 UTC Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.068289 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:35:37 crc kubenswrapper[4755]: E0202 22:35:37.068854 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.139687 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.139778 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.139798 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.139822 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.139841 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:37Z","lastTransitionTime":"2026-02-02T22:35:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.242848 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.242965 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.242992 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.243017 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.243035 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:37Z","lastTransitionTime":"2026-02-02T22:35:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.346166 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.346216 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.346232 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.346257 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.346277 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:37Z","lastTransitionTime":"2026-02-02T22:35:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.449115 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.449181 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.449215 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.449247 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.449270 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:37Z","lastTransitionTime":"2026-02-02T22:35:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.551716 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.551835 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.551857 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.551880 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.551897 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:37Z","lastTransitionTime":"2026-02-02T22:35:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.655250 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.655304 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.655324 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.655348 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.655365 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:37Z","lastTransitionTime":"2026-02-02T22:35:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.757666 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.757780 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.757815 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.757846 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.757868 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:37Z","lastTransitionTime":"2026-02-02T22:35:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.860689 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.860787 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.860812 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.860841 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:37 crc kubenswrapper[4755]: I0202 22:35:37.860862 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:37Z","lastTransitionTime":"2026-02-02T22:35:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 22:35:38 crc kubenswrapper[4755]: I0202 22:35:38.053133 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 09:13:03.441829285 +0000 UTC
Feb 02 22:35:38 crc kubenswrapper[4755]: I0202 22:35:38.066298 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:38 crc kubenswrapper[4755]: I0202 22:35:38.066349 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:38 crc kubenswrapper[4755]: I0202 22:35:38.066369 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:38 crc kubenswrapper[4755]: I0202 22:35:38.066398 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:38 crc kubenswrapper[4755]: I0202 22:35:38.066419 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:38Z","lastTransitionTime":"2026-02-02T22:35:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:38 crc kubenswrapper[4755]: I0202 22:35:38.068578 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 22:35:38 crc kubenswrapper[4755]: E0202 22:35:38.068717 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 22:35:38 crc kubenswrapper[4755]: I0202 22:35:38.069017 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 22:35:38 crc kubenswrapper[4755]: I0202 22:35:38.069037 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 22:35:38 crc kubenswrapper[4755]: E0202 22:35:38.069226 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 22:35:38 crc kubenswrapper[4755]: E0202 22:35:38.069288 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
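Every one of these failures traces back to a single check: the container runtime found no CNI network configuration under /etc/kubernetes/cni/net.d/, so it reports NetworkReady=false and kubelet skips syncing any pod that needs the cluster network. A rough sketch of that kind of directory scan follows; the real logic lives in the runtime's libcni/ocicni code, and the function name and extension list here are illustrative assumptions.

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether dir contains at least one CNI network
// configuration file (*.conf, *.conflist or *.json) — roughly the
// condition behind "no CNI configuration file in .../net.d/".
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	fmt.Println("NetworkReady:", ok, "err:", err)
}
```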
Feb 02 22:35:39 crc kubenswrapper[4755]: I0202 22:35:39.053456 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 03:22:39.164870046 +0000 UTC
Feb 02 22:35:39 crc kubenswrapper[4755]: I0202 22:35:39.069045 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml"
Feb 02 22:35:39 crc kubenswrapper[4755]: E0202 22:35:39.069228 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923"
Feb 02 22:35:39 crc kubenswrapper[4755]: I0202 22:35:39.097787 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:39 crc kubenswrapper[4755]: I0202 22:35:39.097852 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:39 crc kubenswrapper[4755]: I0202 22:35:39.097876 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:39 crc kubenswrapper[4755]: I0202 22:35:39.097905 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:39 crc kubenswrapper[4755]: I0202 22:35:39.097934 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:39Z","lastTransitionTime":"2026-02-02T22:35:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:40 crc kubenswrapper[4755]: I0202 22:35:40.028790 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:40 crc kubenswrapper[4755]: I0202 22:35:40.028854 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:40 crc kubenswrapper[4755]: I0202 22:35:40.028874 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:40 crc kubenswrapper[4755]: I0202 22:35:40.028900 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:40 crc kubenswrapper[4755]: I0202 22:35:40.028916 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:40Z","lastTransitionTime":"2026-02-02T22:35:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:40 crc kubenswrapper[4755]: I0202 22:35:40.053913 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 09:15:49.212257236 +0000 UTC
Feb 02 22:35:40 crc kubenswrapper[4755]: I0202 22:35:40.068261 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 22:35:40 crc kubenswrapper[4755]: I0202 22:35:40.068304 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 22:35:40 crc kubenswrapper[4755]: I0202 22:35:40.068416 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 22:35:40 crc kubenswrapper[4755]: E0202 22:35:40.068634 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 22:35:40 crc kubenswrapper[4755]: E0202 22:35:40.068819 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 22:35:40 crc kubenswrapper[4755]: E0202 22:35:40.069143 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 22:35:41 crc kubenswrapper[4755]: I0202 22:35:41.054642 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 10:49:43.49260152 +0000 UTC
Feb 02 22:35:41 crc kubenswrapper[4755]: I0202 22:35:41.061724 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:41 crc kubenswrapper[4755]: I0202 22:35:41.061821 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:41 crc kubenswrapper[4755]: I0202 22:35:41.061839 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:41 crc kubenswrapper[4755]: I0202 22:35:41.061863 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:41 crc kubenswrapper[4755]: I0202 22:35:41.061883 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:41Z","lastTransitionTime":"2026-02-02T22:35:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:41 crc kubenswrapper[4755]: I0202 22:35:41.068459 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml"
Feb 02 22:35:41 crc kubenswrapper[4755]: E0202 22:35:41.068615 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923"
Feb 02 22:35:41 crc kubenswrapper[4755]: I0202 22:35:41.069774 4755 scope.go:117] "RemoveContainer" containerID="23a5422c202fbce56b8c0b11d877cadff84502f56d66f5ce94b79164ff61417f"
Feb 02 22:35:41 crc kubenswrapper[4755]: E0202 22:35:41.070052 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4mblb_openshift-ovn-kubernetes(ae78d89e-7970-49df-8839-b1b6d7de4ec1)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1"
Has your network provider started?"} Feb 02 22:35:41 crc kubenswrapper[4755]: I0202 22:35:41.989120 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:41 crc kubenswrapper[4755]: I0202 22:35:41.989184 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:41 crc kubenswrapper[4755]: I0202 22:35:41.989206 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:41 crc kubenswrapper[4755]: I0202 22:35:41.989798 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:41 crc kubenswrapper[4755]: I0202 22:35:41.989878 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:41Z","lastTransitionTime":"2026-02-02T22:35:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.055832 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 22:43:10.784755164 +0000 UTC Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.068576 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.068710 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:35:42 crc kubenswrapper[4755]: E0202 22:35:42.068820 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:35:42 crc kubenswrapper[4755]: E0202 22:35:42.068909 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.069001 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:35:42 crc kubenswrapper[4755]: E0202 22:35:42.069263 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.093725 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.093885 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.093910 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.093932 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.093949 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:42Z","lastTransitionTime":"2026-02-02T22:35:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.197185 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.197261 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.197279 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.197305 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.197323 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:42Z","lastTransitionTime":"2026-02-02T22:35:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.300237 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.300297 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.300316 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.300342 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.300362 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:42Z","lastTransitionTime":"2026-02-02T22:35:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.403479 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.403548 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.403566 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.403592 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.403609 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:42Z","lastTransitionTime":"2026-02-02T22:35:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.507215 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.507279 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.507303 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.507330 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.507351 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:42Z","lastTransitionTime":"2026-02-02T22:35:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.610049 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.610101 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.610121 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.610143 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.610160 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:42Z","lastTransitionTime":"2026-02-02T22:35:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.713169 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.713243 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.713265 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.713294 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.713318 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:42Z","lastTransitionTime":"2026-02-02T22:35:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.815580 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.815635 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.815653 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.815677 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.815696 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:42Z","lastTransitionTime":"2026-02-02T22:35:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.833887 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.833962 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.833990 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.834023 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.834046 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:42Z","lastTransitionTime":"2026-02-02T22:35:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:42 crc kubenswrapper[4755]: E0202 22:35:42.854782 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:42Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.859544 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.859608 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.859628 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.859652 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.859676 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:42Z","lastTransitionTime":"2026-02-02T22:35:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:42 crc kubenswrapper[4755]: E0202 22:35:42.879529 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:42Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.884402 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.884465 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
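The patch in the entry above is rejected before it ever reaches the Node object: the node.network-node-identity.openshift.io admission webhook at https://127.0.0.1:9743 serves a certificate whose validity ended 2025-08-24T17:21:41Z, while the node clock reads 2026-02-02T22:35:42Z. Here is a small sketch that inspects the peer certificate the same way the failing x509 verification sees it; the endpoint comes from the log, the program is meant to run on the node itself, and it is illustrative rather than any OpenShift tooling.

```go
// certcheck.go: dial the webhook endpoint from the log and report the
// serving certificate's validity window. InsecureSkipVerify lets us read
// the certificate even though normal verification (the kubelet's path)
// fails; the expiry comparison is then done explicitly.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	addr := "127.0.0.1:9743" // webhook address from the failed Post in the log
	conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Printf("dial %s: %v\n", addr, err)
		return
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject:   %s\n", cert.Subject)
	fmt.Printf("notBefore: %s\n", cert.NotBefore)
	fmt.Printf("notAfter:  %s\n", cert.NotAfter)
	if now := time.Now(); now.After(cert.NotAfter) {
		// mirrors the log: "current time ... is after 2025-08-24T17:21:41Z"
		fmt.Printf("EXPIRED: %s is after notAfter\n", now.Format(time.RFC3339))
	}
}
```

Because the webhook intercepts every node patch, no amount of retrying on the kubelet side can succeed until that serving certificate is rotated.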
event="NodeHasNoDiskPressure" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.884488 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.884515 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.884536 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:42Z","lastTransitionTime":"2026-02-02T22:35:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:42 crc kubenswrapper[4755]: E0202 22:35:42.903640 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:42Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.908676 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.908769 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
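The setters.go:603 entries spell out the condition object the kubelet keeps trying to publish: type Ready, status False, reason KubeletNotReady, with the runtime's network-not-ready text as the message. The sketch below shows how such a condition is assembled with the upstream API types; the notReadyCondition helper is hypothetical (it is not kubelet source), and building it requires the k8s.io/api and k8s.io/apimachinery modules.

```go
// readycond.go: assemble a v1.NodeCondition shaped like the one in the
// "Node became not ready" entries above. The helper is illustrative.
package main

import (
	"fmt"

	v1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

// notReadyCondition mirrors the fields visible in the log: Ready=False,
// reason KubeletNotReady, heartbeat and transition stamped with one time.
func notReadyCondition(runtimeMsg string) v1.NodeCondition {
	now := metav1.Now()
	return v1.NodeCondition{
		Type:               v1.NodeReady,
		Status:             v1.ConditionFalse,
		LastHeartbeatTime:  now,
		LastTransitionTime: now,
		Reason:             "KubeletNotReady",
		Message:            runtimeMsg,
	}
}

func main() {
	c := notReadyCondition("container runtime network not ready: NetworkReady=false")
	fmt.Printf("%s=%s reason=%s msg=%q\n", c.Type, c.Status, c.Reason, c.Message)
}
```

Note that every entry above stamps a fresh lastTransitionTime equal to the heartbeat time, consistent with the status never being accepted server-side while the webhook keeps rejecting the patch.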
event="NodeHasNoDiskPressure" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.908797 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.908826 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.908849 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:42Z","lastTransitionTime":"2026-02-02T22:35:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:42 crc kubenswrapper[4755]: E0202 22:35:42.929476 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:42Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.936060 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.936123 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.936146 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.936171 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.936188 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:42Z","lastTransitionTime":"2026-02-02T22:35:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:42 crc kubenswrapper[4755]: E0202 22:35:42.957685 4755 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T22:35:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"2104f2f4-bdbb-4460-aabd-cf6d1f96bb63\\\",\\\"systemUUID\\\":\\\"3ead4062-ccf7-4dcd-8b02-0beb6e1ef76e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T22:35:42Z is after 2025-08-24T17:21:41Z" Feb 02 22:35:42 crc kubenswrapper[4755]: E0202 22:35:42.958001 4755 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.960486 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.960525 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.960539 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.960560 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:42 crc kubenswrapper[4755]: I0202 22:35:42.960575 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:42Z","lastTransitionTime":"2026-02-02T22:35:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.056924 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 12:05:49.637110939 +0000 UTC Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.063284 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.063373 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.063398 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.063428 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.063451 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:43Z","lastTransitionTime":"2026-02-02T22:35:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.067908 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:35:43 crc kubenswrapper[4755]: E0202 22:35:43.068302 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.166037 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.166112 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.166130 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.166154 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.166173 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:43Z","lastTransitionTime":"2026-02-02T22:35:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.269411 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.269491 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.269517 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.269546 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.269568 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:43Z","lastTransitionTime":"2026-02-02T22:35:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.269568 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:43Z","lastTransitionTime":"2026-02-02T22:35:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.372225 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.372289 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.372308 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.372336 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.372359 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:43Z","lastTransitionTime":"2026-02-02T22:35:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.474805 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.474880 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.474905 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.474934 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.474956 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:43Z","lastTransitionTime":"2026-02-02T22:35:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.577937 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.578010 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.578033 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.578063 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.578088 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:43Z","lastTransitionTime":"2026-02-02T22:35:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.684854 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.684938 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.684968 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.684994 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.685014 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:43Z","lastTransitionTime":"2026-02-02T22:35:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.788288 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.788349 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.788367 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.788395 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.788414 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:43Z","lastTransitionTime":"2026-02-02T22:35:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.890875 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.890920 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.890930 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.890945 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.890955 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:43Z","lastTransitionTime":"2026-02-02T22:35:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.993807 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.993880 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.993904 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.993932 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:43 crc kubenswrapper[4755]: I0202 22:35:43.993953 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:43Z","lastTransitionTime":"2026-02-02T22:35:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.057054 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 15:11:56.804646877 +0000 UTC
Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.068626 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.068692 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.068794 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 22:35:44 crc kubenswrapper[4755]: E0202 22:35:44.069004 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 22:35:44 crc kubenswrapper[4755]: E0202 22:35:44.069259 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
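The node-status patches above are rejected because the node.network-node-identity.openshift.io webhook serves a certificate that expired on 2025-08-24T17:21:41Z, roughly 162 days before the logged clock of 2026-02-02T22:35:42Z. A small stdlib sketch that computes that skew straight from the "current time X is after Y" fragment of such x509 errors; the filename is again a hypothetical local copy:

    # Sketch: measure how long the webhook certificate has been expired.
    import re
    from datetime import datetime

    ERR = re.compile(r'current time ([0-9TZ:-]+) is after ([0-9TZ:-]+)')
    FMT = "%Y-%m-%dT%H:%M:%SZ"

    with open("kubelet.log") as f:  # hypothetical filename
        for line in f:
            m = ERR.search(line)
            if m:
                now, not_after = (datetime.strptime(g, FMT) for g in m.groups())
                print("certificate expired", now - not_after, "ago")  # ~162 days here
                break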
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.094271 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.100682 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.100779 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.100799 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.100827 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.100847 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:44Z","lastTransitionTime":"2026-02-02T22:35:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.203843 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.203915 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.203945 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.203976 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.203999 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:44Z","lastTransitionTime":"2026-02-02T22:35:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.308032 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.308094 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.308115 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.308142 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.308165 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:44Z","lastTransitionTime":"2026-02-02T22:35:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.410925 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.410993 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.411012 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.411037 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.411055 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:44Z","lastTransitionTime":"2026-02-02T22:35:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.513782 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.513843 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.513860 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.513884 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.513903 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:44Z","lastTransitionTime":"2026-02-02T22:35:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.616803 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.616871 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.616891 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.616917 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.616936 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:44Z","lastTransitionTime":"2026-02-02T22:35:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.720199 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.720256 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.720274 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.720298 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.720324 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:44Z","lastTransitionTime":"2026-02-02T22:35:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.823933 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.824019 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.824044 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.824072 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.824096 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:44Z","lastTransitionTime":"2026-02-02T22:35:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.927499 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.927564 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.927587 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.927612 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:44 crc kubenswrapper[4755]: I0202 22:35:44.927631 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:44Z","lastTransitionTime":"2026-02-02T22:35:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.030406 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.030487 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.030512 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.030541 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.030564 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:45Z","lastTransitionTime":"2026-02-02T22:35:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.058154 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 20:42:21.958970113 +0000 UTC Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.067910 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:35:45 crc kubenswrapper[4755]: E0202 22:35:45.068136 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
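Note the certificate_manager.go:356 lines threaded through this stretch: the kubelet-serving certificate itself is healthy until 2026-02-24 05:53:03, while the logged rotation deadline differs on every pass (2026-01-04, 2026-01-10, 2025-12-19, 2026-01-06 in this excerpt) because the certificate manager recomputes a jittered deadline each time. A sketch that gathers those deadlines from lines like the ones above; it drops the sub-second part, since the log carries nine fractional digits, more than strptime's %f accepts, and the filename is again hypothetical:

    # Sketch: collect the jittered rotation deadlines logged by certificate_manager.go.
    import re
    from datetime import datetime

    ROT = re.compile(r'rotation deadline is (\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})')

    deadlines = []
    with open("kubelet.log") as f:  # hypothetical filename
        for line in f:
            m = ROT.search(line)
            if m:
                deadlines.append(datetime.strptime(m.group(1), "%Y-%m-%d %H:%M:%S"))
    print(sorted(set(deadlines)))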
pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.112459 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6ljzc" podStartSLOduration=74.112440926 podStartE2EDuration="1m14.112440926s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:35:45.092384193 +0000 UTC m=+100.783604589" watchObservedRunningTime="2026-02-02 22:35:45.112440926 +0000 UTC m=+100.803661262" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.133641 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.133679 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.133691 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.133707 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.133720 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:45Z","lastTransitionTime":"2026-02-02T22:35:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.139309 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-rdh9w" podStartSLOduration=75.139281704 podStartE2EDuration="1m15.139281704s" podCreationTimestamp="2026-02-02 22:34:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:35:45.113195048 +0000 UTC m=+100.804415384" watchObservedRunningTime="2026-02-02 22:35:45.139281704 +0000 UTC m=+100.830502070" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.182903 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-5fdlw" podStartSLOduration=75.1828775 podStartE2EDuration="1m15.1828775s" podCreationTimestamp="2026-02-02 22:34:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:35:45.140112248 +0000 UTC m=+100.831332614" watchObservedRunningTime="2026-02-02 22:35:45.1828775 +0000 UTC m=+100.874097866" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.236567 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.236830 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.236961 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.237059 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.237150 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:45Z","lastTransitionTime":"2026-02-02T22:35:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.263150 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=52.263130425 podStartE2EDuration="52.263130425s" podCreationTimestamp="2026-02-02 22:34:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:35:45.263089604 +0000 UTC m=+100.954309950" watchObservedRunningTime="2026-02-02 22:35:45.263130425 +0000 UTC m=+100.954350761" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.263295 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=1.263286149 podStartE2EDuration="1.263286149s" podCreationTimestamp="2026-02-02 22:35:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:35:45.242849695 +0000 UTC m=+100.934070031" watchObservedRunningTime="2026-02-02 22:35:45.263286149 +0000 UTC m=+100.954506515" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.294755 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-4j9p7" podStartSLOduration=75.294707008 podStartE2EDuration="1m15.294707008s" podCreationTimestamp="2026-02-02 22:34:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:35:45.294605495 +0000 UTC m=+100.985825861" watchObservedRunningTime="2026-02-02 22:35:45.294707008 +0000 UTC m=+100.985927374" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.339960 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.340018 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.340036 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.340061 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.340081 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:45Z","lastTransitionTime":"2026-02-02T22:35:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
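The pod_startup_latency_tracker.go:104 records above pair a float podStartSLOduration (seconds) with a Go-formatted podStartE2EDuration string such as "1m15.139281704s". A sketch that ranks pods by E2E startup duration from such records; the duration parser only covers the minute/second shapes that actually appear in this excerpt, and the filename is again hypothetical:

    # Sketch: rank pods by podStartE2EDuration from the latency-tracker records above.
    import re

    REC = re.compile(r'pod="([^"]+)" podStartSLOduration=\S+ podStartE2EDuration="([^"]+)"')

    def go_seconds(dur: str) -> float:
        # Handles only "NmS.SSs" and "S.SSs", the two shapes in this excerpt.
        m = re.fullmatch(r'(?:(\d+)m)?([\d.]+)s', dur)
        return int(m.group(1) or 0) * 60 + float(m.group(2))

    rows = []
    with open("kubelet.log") as f:  # hypothetical filename
        for line in f:
            m = REC.search(line)
            if m:
                rows.append((go_seconds(m.group(2)), m.group(1)))
    for secs, pod in sorted(rows, reverse=True):
        print(f"{secs:9.3f}s  {pod}")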
Has your network provider started?"} Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.355506 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podStartSLOduration=75.355486106 podStartE2EDuration="1m15.355486106s" podCreationTimestamp="2026-02-02 22:34:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:35:45.354843567 +0000 UTC m=+101.046063933" watchObservedRunningTime="2026-02-02 22:35:45.355486106 +0000 UTC m=+101.046706442" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.432103 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=21.432074955 podStartE2EDuration="21.432074955s" podCreationTimestamp="2026-02-02 22:35:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:35:45.431426587 +0000 UTC m=+101.122646943" watchObservedRunningTime="2026-02-02 22:35:45.432074955 +0000 UTC m=+101.123295321" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.432475 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-pgbrf" podStartSLOduration=75.432469107 podStartE2EDuration="1m15.432469107s" podCreationTimestamp="2026-02-02 22:34:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:35:45.417860049 +0000 UTC m=+101.109080445" watchObservedRunningTime="2026-02-02 22:35:45.432469107 +0000 UTC m=+101.123689473" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.442615 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.442706 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.442724 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.442772 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.442793 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:45Z","lastTransitionTime":"2026-02-02T22:35:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.480071 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=79.480042717 podStartE2EDuration="1m19.480042717s" podCreationTimestamp="2026-02-02 22:34:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:35:45.45637104 +0000 UTC m=+101.147591406" watchObservedRunningTime="2026-02-02 22:35:45.480042717 +0000 UTC m=+101.171263053" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.546196 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.546254 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.546270 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.546295 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.546312 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:45Z","lastTransitionTime":"2026-02-02T22:35:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.649150 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.649253 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.649279 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.649706 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.649979 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:45Z","lastTransitionTime":"2026-02-02T22:35:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.752992 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.753059 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.753078 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.753103 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.753120 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:45Z","lastTransitionTime":"2026-02-02T22:35:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.856802 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.856887 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.856909 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.856943 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.856966 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:45Z","lastTransitionTime":"2026-02-02T22:35:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.960507 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.960576 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.960594 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.960619 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:45 crc kubenswrapper[4755]: I0202 22:35:45.960637 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:45Z","lastTransitionTime":"2026-02-02T22:35:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.059226 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 02:02:39.007504629 +0000 UTC
Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.063769 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.063829 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.063851 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.063877 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.063895 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:46Z","lastTransitionTime":"2026-02-02T22:35:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.068354 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.068385 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.068394 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 22:35:46 crc kubenswrapper[4755]: E0202 22:35:46.068499 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 22:35:46 crc kubenswrapper[4755]: E0202 22:35:46.068653 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 22:35:46 crc kubenswrapper[4755]: E0202 22:35:46.068776 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
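Every failure above is one root condition surfacing through different components: the container runtime reports NetworkReady=false because nothing has written a CNI network config into /etc/kubernetes/cni/net.d/ yet, so the kubelet keeps the node NotReady and refuses to sync pods that need pod networking. On an OpenShift/CRC node that file is normally dropped by the cluster network stack's own pods (Multus/OVN-Kubernetes) once they start, so the condition is expected to clear on its own. For orientation only, the kind of file the runtime is waiting for is a CNI conflist; the sketch below is a generic bridge-plugin example with invented names and address ranges, not what OpenShift actually writes:

{
  "cniVersion": "0.4.0",
  "name": "example-net",
  "plugins": [
    {
      "type": "bridge",
      "bridge": "cni0",
      "isGateway": true,
      "ipMasq": true,
      "ipam": {
        "type": "host-local",
        "ranges": [[{ "subnet": "10.88.0.0/16" }]]
      }
    }
  ]
}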
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.166343 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.166401 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.166419 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.166442 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.166460 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:46Z","lastTransitionTime":"2026-02-02T22:35:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.270128 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.270220 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.270245 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.270276 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.270298 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:46Z","lastTransitionTime":"2026-02-02T22:35:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.372932 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.373016 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.373040 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.373070 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.373091 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:46Z","lastTransitionTime":"2026-02-02T22:35:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.476292 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.476372 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.476391 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.476419 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.476441 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:46Z","lastTransitionTime":"2026-02-02T22:35:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.579428 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.579492 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.579514 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.579538 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.579556 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:46Z","lastTransitionTime":"2026-02-02T22:35:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.682853 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.682929 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.682950 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.682979 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.682998 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:46Z","lastTransitionTime":"2026-02-02T22:35:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.786634 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.786706 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.786762 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.786792 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.786811 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:46Z","lastTransitionTime":"2026-02-02T22:35:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.889831 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.889894 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.889912 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.889936 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.889954 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:46Z","lastTransitionTime":"2026-02-02T22:35:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.992971 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.993031 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.993047 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.993069 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:46 crc kubenswrapper[4755]: I0202 22:35:46.993087 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:46Z","lastTransitionTime":"2026-02-02T22:35:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.060100 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 02:15:18.57136482 +0000 UTC Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.068633 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:35:47 crc kubenswrapper[4755]: E0202 22:35:47.068852 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.096148 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.096282 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.096308 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.096336 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.096358 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:47Z","lastTransitionTime":"2026-02-02T22:35:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.199316 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.199399 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.199432 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.199463 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.199483 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:47Z","lastTransitionTime":"2026-02-02T22:35:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.302655 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.302755 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.302769 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.302787 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.302798 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:47Z","lastTransitionTime":"2026-02-02T22:35:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.405765 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.405823 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.405840 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.405863 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.405885 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:47Z","lastTransitionTime":"2026-02-02T22:35:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.509311 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.509382 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.509400 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.509428 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.509446 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:47Z","lastTransitionTime":"2026-02-02T22:35:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.612558 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.612616 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.612635 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.612660 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.612678 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:47Z","lastTransitionTime":"2026-02-02T22:35:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.715851 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.715955 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.716053 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.716089 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.716113 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:47Z","lastTransitionTime":"2026-02-02T22:35:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.819322 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.819409 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.819430 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.819456 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.819474 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:47Z","lastTransitionTime":"2026-02-02T22:35:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.922056 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.922114 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.922134 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.922156 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:47 crc kubenswrapper[4755]: I0202 22:35:47.922173 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:47Z","lastTransitionTime":"2026-02-02T22:35:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.025878 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.025939 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.025956 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.025981 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.025999 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:48Z","lastTransitionTime":"2026-02-02T22:35:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
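The setters.go:603 lines repeat because the kubelet re-evaluates the node's Ready condition on every status sync (roughly every 100 ms in this log) and it keeps coming back false for the same reason. The same condition and message are visible from the API side with standard kubectl (real flags; the node name crc is taken from the log):

kubectl get node crc -o jsonpath='{.status.conditions[?(@.type=="Ready")].message}'
kubectl describe node crc

The first command prints the "container runtime network not ready ..." message embedded in the Ready condition; the second also lists the NodeNotReady/NodeHasSufficient* events being recorded above.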
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.060780 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 01:46:21.509784424 +0000 UTC
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.068106 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.068181 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.068250 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 22:35:48 crc kubenswrapper[4755]: E0202 22:35:48.068431 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 22:35:48 crc kubenswrapper[4755]: E0202 22:35:48.068639 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 22:35:48 crc kubenswrapper[4755]: E0202 22:35:48.068823 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.128779 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.128836 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.128852 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.128873 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.128888 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:48Z","lastTransitionTime":"2026-02-02T22:35:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.232945 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.233070 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.233091 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.233115 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.233134 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:48Z","lastTransitionTime":"2026-02-02T22:35:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.335905 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.335989 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.336013 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.336042 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.336067 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:48Z","lastTransitionTime":"2026-02-02T22:35:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.439404 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.439520 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.439541 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.439574 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.439596 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:48Z","lastTransitionTime":"2026-02-02T22:35:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.542389 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.542439 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.542453 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.542471 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.542483 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:48Z","lastTransitionTime":"2026-02-02T22:35:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.645362 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.645425 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.645442 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.645465 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.645483 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:48Z","lastTransitionTime":"2026-02-02T22:35:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.747683 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.747780 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.747807 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.747838 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.747861 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:48Z","lastTransitionTime":"2026-02-02T22:35:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.850839 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.850945 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.850964 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.850990 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.851007 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:48Z","lastTransitionTime":"2026-02-02T22:35:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.953488 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.953555 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.953575 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.953608 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:48 crc kubenswrapper[4755]: I0202 22:35:48.953631 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:48Z","lastTransitionTime":"2026-02-02T22:35:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.055963 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.056054 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.056085 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.056111 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.056146 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:49Z","lastTransitionTime":"2026-02-02T22:35:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.061475 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 00:24:49.353885203 +0000 UTC
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.068116 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml"
Feb 02 22:35:49 crc kubenswrapper[4755]: E0202 22:35:49.068274 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923"
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.159644 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.159715 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.159767 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.159793 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.159810 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:49Z","lastTransitionTime":"2026-02-02T22:35:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.262777 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.262857 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.262885 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.262915 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.262937 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:49Z","lastTransitionTime":"2026-02-02T22:35:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.365897 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.365975 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.365999 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.366029 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.366050 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:49Z","lastTransitionTime":"2026-02-02T22:35:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.469397 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.469452 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.469466 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.469484 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.469498 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:49Z","lastTransitionTime":"2026-02-02T22:35:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.560270 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs\") pod \"network-metrics-daemon-k8tml\" (UID: \"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\") " pod="openshift-multus/network-metrics-daemon-k8tml"
Feb 02 22:35:49 crc kubenswrapper[4755]: E0202 22:35:49.560455 4755 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Feb 02 22:35:49 crc kubenswrapper[4755]: E0202 22:35:49.560557 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs podName:3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923 nodeName:}" failed. No retries permitted until 2026-02-02 22:36:53.560528874 +0000 UTC m=+169.251749240 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs") pod "network-metrics-daemon-k8tml" (UID: "3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923") : object "openshift-multus"/"metrics-daemon-secret" not registered
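This mount failure is separate from the CNI problem: the reconciler wants to mount the metrics-certs secret volume for network-metrics-daemon-k8tml, but the kubelet's object cache has not yet registered the openshift-multus/metrics-daemon-secret Secret, so nestedpendingoperations parks the operation with exponential backoff; the 1m4s (64 s) durationBeforeRetry is consistent with a doubling backoff that has already failed several times. For reference only, the object being waited on is an ordinary Secret shaped roughly like the sketch below (a hedged guess at its layout; on OpenShift this secret is populated by the service CA, and the key names and contents here are placeholders, not taken from the cluster):

apiVersion: v1
kind: Secret
metadata:
  name: metrics-daemon-secret
  namespace: openshift-multus
type: kubernetes.io/tls
data:
  tls.crt: "<base64 PEM certificate (placeholder)>"
  tls.key: "<base64 PEM private key (placeholder)>"

Once the Secret is observed by the kubelet, the retry scheduled for 22:36:53 (or a later one) can mount the volume and the pod can start.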
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs") pod "network-metrics-daemon-k8tml" (UID: "3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.572084 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.572165 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.572188 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.572217 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.572243 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:49Z","lastTransitionTime":"2026-02-02T22:35:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.675150 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.675202 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.675218 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.675240 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.675260 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:49Z","lastTransitionTime":"2026-02-02T22:35:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.778562 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.778644 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.778674 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.778704 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.778756 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:49Z","lastTransitionTime":"2026-02-02T22:35:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.881878 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.881949 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.881966 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.881989 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.882008 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:49Z","lastTransitionTime":"2026-02-02T22:35:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.985536 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.985596 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.985613 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.985637 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:49 crc kubenswrapper[4755]: I0202 22:35:49.985654 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:49Z","lastTransitionTime":"2026-02-02T22:35:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.062535 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 02:47:59.829574768 +0000 UTC
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.068133 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.068216 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.068159 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 22:35:50 crc kubenswrapper[4755]: E0202 22:35:50.068346 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 22:35:50 crc kubenswrapper[4755]: E0202 22:35:50.068443 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 22:35:50 crc kubenswrapper[4755]: E0202 22:35:50.068586 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.088116 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.088171 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.088198 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.088227 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.088249 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:50Z","lastTransitionTime":"2026-02-02T22:35:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.191109 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.191183 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.191205 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.191234 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.191257 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:50Z","lastTransitionTime":"2026-02-02T22:35:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.293837 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.293897 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.293915 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.293938 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.293955 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:50Z","lastTransitionTime":"2026-02-02T22:35:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.397559 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.397629 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.397648 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.397674 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.397691 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:50Z","lastTransitionTime":"2026-02-02T22:35:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.500355 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.500416 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.500435 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.500461 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.500479 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:50Z","lastTransitionTime":"2026-02-02T22:35:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.604028 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.604172 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.604202 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.604232 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.604254 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:50Z","lastTransitionTime":"2026-02-02T22:35:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.707333 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.707401 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.707427 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.707458 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.707534 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:50Z","lastTransitionTime":"2026-02-02T22:35:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.810179 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.810246 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.810264 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.810289 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.810312 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:50Z","lastTransitionTime":"2026-02-02T22:35:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.912635 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.912689 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.912710 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.912767 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:50 crc kubenswrapper[4755]: I0202 22:35:50.912786 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:50Z","lastTransitionTime":"2026-02-02T22:35:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.015901 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.015969 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.015987 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.016010 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.016029 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:51Z","lastTransitionTime":"2026-02-02T22:35:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.063381 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 12:38:20.521299409 +0000 UTC
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.068933 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml"
Feb 02 22:35:51 crc kubenswrapper[4755]: E0202 22:35:51.069424 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.119066 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.119141 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.119160 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.119184 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.119220 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:51Z","lastTransitionTime":"2026-02-02T22:35:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.221769 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.221828 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.221846 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.221869 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.221886 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:51Z","lastTransitionTime":"2026-02-02T22:35:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.323933 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.324075 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.324095 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.324120 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.324137 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:51Z","lastTransitionTime":"2026-02-02T22:35:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.426653 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.426718 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.426781 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.426812 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.426834 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:51Z","lastTransitionTime":"2026-02-02T22:35:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.530370 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.530439 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.530455 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.530479 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.530496 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:51Z","lastTransitionTime":"2026-02-02T22:35:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.634232 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.634311 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.634337 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.634369 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.634390 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:51Z","lastTransitionTime":"2026-02-02T22:35:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.737031 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.737093 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.737110 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.737135 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.737152 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:51Z","lastTransitionTime":"2026-02-02T22:35:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.840710 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.840815 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.840833 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.840863 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.840880 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:51Z","lastTransitionTime":"2026-02-02T22:35:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.943315 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.943419 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.943479 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.943506 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:51 crc kubenswrapper[4755]: I0202 22:35:51.943525 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:51Z","lastTransitionTime":"2026-02-02T22:35:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.051200 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.051280 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.051304 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.051335 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.051355 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:52Z","lastTransitionTime":"2026-02-02T22:35:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.063637 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 18:58:12.884986713 +0000 UTC
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.068800 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 22:35:52 crc kubenswrapper[4755]: E0202 22:35:52.068967 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.069018 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.069455 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 22:35:52 crc kubenswrapper[4755]: E0202 22:35:52.069543 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 22:35:52 crc kubenswrapper[4755]: E0202 22:35:52.069645 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.070070 4755 scope.go:117] "RemoveContainer" containerID="23a5422c202fbce56b8c0b11d877cadff84502f56d66f5ce94b79164ff61417f"
Feb 02 22:35:52 crc kubenswrapper[4755]: E0202 22:35:52.070331 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4mblb_openshift-ovn-kubernetes(ae78d89e-7970-49df-8839-b1b6d7de4ec1)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.154595 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.154656 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.154722 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.154772 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.155096 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:52Z","lastTransitionTime":"2026-02-02T22:35:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.258327 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.258414 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.258439 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.258471 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.258497 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:52Z","lastTransitionTime":"2026-02-02T22:35:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.362054 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.362106 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.362119 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.362139 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.362151 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:52Z","lastTransitionTime":"2026-02-02T22:35:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.466009 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.466100 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.466121 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.466149 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.466168 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:52Z","lastTransitionTime":"2026-02-02T22:35:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.569124 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.569191 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.569216 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.569246 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.569270 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:52Z","lastTransitionTime":"2026-02-02T22:35:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.672791 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.672846 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.672862 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.672886 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.672905 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:52Z","lastTransitionTime":"2026-02-02T22:35:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.775927 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.776017 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.776043 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.776079 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.776106 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:52Z","lastTransitionTime":"2026-02-02T22:35:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.878901 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.879318 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.879528 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.879782 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.879985 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:52Z","lastTransitionTime":"2026-02-02T22:35:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.983306 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.983387 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.983406 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.983431 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:52 crc kubenswrapper[4755]: I0202 22:35:52.983448 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:52Z","lastTransitionTime":"2026-02-02T22:35:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.064277 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 18:47:16.181489984 +0000 UTC
Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.068817 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml"
Feb 02 22:35:53 crc kubenswrapper[4755]: E0202 22:35:53.069036 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923"
pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.086184 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.086250 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.086275 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.086301 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.086318 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:53Z","lastTransitionTime":"2026-02-02T22:35:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.188885 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.188990 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.189011 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.189068 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.189086 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:53Z","lastTransitionTime":"2026-02-02T22:35:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.227326 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.227385 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.227403 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.227426 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.227442 4755 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T22:35:53Z","lastTransitionTime":"2026-02-02T22:35:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.291560 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-chwn5"]
Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.292170 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-chwn5"
Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.295164 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.295327 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.297025 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.297798 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.407514 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/abf98821-8322-437a-8881-7459739d1fac-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-chwn5\" (UID: \"abf98821-8322-437a-8881-7459739d1fac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-chwn5"
Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.407926 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/abf98821-8322-437a-8881-7459739d1fac-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-chwn5\" (UID: \"abf98821-8322-437a-8881-7459739d1fac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-chwn5"
Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.407985 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/abf98821-8322-437a-8881-7459739d1fac-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-chwn5\" (UID: \"abf98821-8322-437a-8881-7459739d1fac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-chwn5"
\"kubernetes.io/host-path/abf98821-8322-437a-8881-7459739d1fac-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-chwn5\" (UID: \"abf98821-8322-437a-8881-7459739d1fac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-chwn5" Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.408019 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/abf98821-8322-437a-8881-7459739d1fac-service-ca\") pod \"cluster-version-operator-5c965bbfc6-chwn5\" (UID: \"abf98821-8322-437a-8881-7459739d1fac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-chwn5" Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.408048 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/abf98821-8322-437a-8881-7459739d1fac-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-chwn5\" (UID: \"abf98821-8322-437a-8881-7459739d1fac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-chwn5" Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.509024 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/abf98821-8322-437a-8881-7459739d1fac-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-chwn5\" (UID: \"abf98821-8322-437a-8881-7459739d1fac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-chwn5" Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.509084 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/abf98821-8322-437a-8881-7459739d1fac-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-chwn5\" (UID: \"abf98821-8322-437a-8881-7459739d1fac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-chwn5" Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.509124 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/abf98821-8322-437a-8881-7459739d1fac-service-ca\") pod \"cluster-version-operator-5c965bbfc6-chwn5\" (UID: \"abf98821-8322-437a-8881-7459739d1fac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-chwn5" Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.509244 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/abf98821-8322-437a-8881-7459739d1fac-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-chwn5\" (UID: \"abf98821-8322-437a-8881-7459739d1fac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-chwn5" Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.509276 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/abf98821-8322-437a-8881-7459739d1fac-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-chwn5\" (UID: \"abf98821-8322-437a-8881-7459739d1fac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-chwn5" Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.509711 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/abf98821-8322-437a-8881-7459739d1fac-etc-ssl-certs\") pod 
\"cluster-version-operator-5c965bbfc6-chwn5\" (UID: \"abf98821-8322-437a-8881-7459739d1fac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-chwn5" Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.509856 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/abf98821-8322-437a-8881-7459739d1fac-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-chwn5\" (UID: \"abf98821-8322-437a-8881-7459739d1fac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-chwn5" Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.510952 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/abf98821-8322-437a-8881-7459739d1fac-service-ca\") pod \"cluster-version-operator-5c965bbfc6-chwn5\" (UID: \"abf98821-8322-437a-8881-7459739d1fac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-chwn5" Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.519666 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/abf98821-8322-437a-8881-7459739d1fac-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-chwn5\" (UID: \"abf98821-8322-437a-8881-7459739d1fac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-chwn5" Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.537378 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/abf98821-8322-437a-8881-7459739d1fac-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-chwn5\" (UID: \"abf98821-8322-437a-8881-7459739d1fac\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-chwn5" Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.616832 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-chwn5" Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.759959 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-chwn5" event={"ID":"abf98821-8322-437a-8881-7459739d1fac","Type":"ContainerStarted","Data":"46534786bd4847a4b37a7915938e6195c9c8b5a3509bd42c0a1ae0f3d4a4c19a"} Feb 02 22:35:53 crc kubenswrapper[4755]: I0202 22:35:53.760063 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-chwn5" event={"ID":"abf98821-8322-437a-8881-7459739d1fac","Type":"ContainerStarted","Data":"327ae83b7bd4db4bb0f956218481b6161e2abaf65614ce9208bd0c0403de142e"} Feb 02 22:35:54 crc kubenswrapper[4755]: I0202 22:35:54.064495 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 21:24:29.568319483 +0000 UTC Feb 02 22:35:54 crc kubenswrapper[4755]: I0202 22:35:54.064673 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Feb 02 22:35:54 crc kubenswrapper[4755]: I0202 22:35:54.067819 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:35:54 crc kubenswrapper[4755]: I0202 22:35:54.067864 4755 util.go:30] "No sandbox for pod can be found. 
Feb 02 22:35:54 crc kubenswrapper[4755]: E0202 22:35:54.067997 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 22:35:54 crc kubenswrapper[4755]: I0202 22:35:54.068470 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 22:35:54 crc kubenswrapper[4755]: E0202 22:35:54.069074 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 22:35:54 crc kubenswrapper[4755]: E0202 22:35:54.068213 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 22:35:54 crc kubenswrapper[4755]: I0202 22:35:54.076958 4755 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Feb 02 22:35:55 crc kubenswrapper[4755]: I0202 22:35:55.068634 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml"
Feb 02 22:35:55 crc kubenswrapper[4755]: E0202 22:35:55.071084 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923"
Feb 02 22:35:55 crc kubenswrapper[4755]: I0202 22:35:55.085685 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-chwn5" podStartSLOduration=85.085656778 podStartE2EDuration="1m25.085656778s" podCreationTimestamp="2026-02-02 22:34:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:35:54.783761137 +0000 UTC m=+110.474981503" watchObservedRunningTime="2026-02-02 22:35:55.085656778 +0000 UTC m=+110.776877144"
Feb 02 22:35:55 crc kubenswrapper[4755]: I0202 22:35:55.085937 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"]
Feb 02 22:35:56 crc kubenswrapper[4755]: I0202 22:35:56.068694 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:35:56 crc kubenswrapper[4755]: I0202 22:35:56.068762 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:35:56 crc kubenswrapper[4755]: E0202 22:35:56.068856 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:35:56 crc kubenswrapper[4755]: I0202 22:35:56.068909 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:35:56 crc kubenswrapper[4755]: E0202 22:35:56.069057 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:35:56 crc kubenswrapper[4755]: E0202 22:35:56.069305 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:35:57 crc kubenswrapper[4755]: I0202 22:35:57.068608 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:35:57 crc kubenswrapper[4755]: E0202 22:35:57.068819 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:35:58 crc kubenswrapper[4755]: I0202 22:35:58.068352 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:35:58 crc kubenswrapper[4755]: I0202 22:35:58.068422 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:35:58 crc kubenswrapper[4755]: I0202 22:35:58.068360 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:35:58 crc kubenswrapper[4755]: E0202 22:35:58.068547 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:35:58 crc kubenswrapper[4755]: E0202 22:35:58.068783 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:35:58 crc kubenswrapper[4755]: E0202 22:35:58.068883 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:35:59 crc kubenswrapper[4755]: I0202 22:35:59.068936 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:35:59 crc kubenswrapper[4755]: E0202 22:35:59.069196 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:36:00 crc kubenswrapper[4755]: I0202 22:36:00.068772 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:36:00 crc kubenswrapper[4755]: I0202 22:36:00.068903 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:36:00 crc kubenswrapper[4755]: E0202 22:36:00.068982 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:36:00 crc kubenswrapper[4755]: I0202 22:36:00.068917 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:36:00 crc kubenswrapper[4755]: E0202 22:36:00.069119 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:36:00 crc kubenswrapper[4755]: E0202 22:36:00.069304 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:36:01 crc kubenswrapper[4755]: I0202 22:36:01.068704 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:36:01 crc kubenswrapper[4755]: E0202 22:36:01.069222 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:36:02 crc kubenswrapper[4755]: I0202 22:36:02.068528 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:36:02 crc kubenswrapper[4755]: I0202 22:36:02.068589 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:36:02 crc kubenswrapper[4755]: E0202 22:36:02.068707 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:36:02 crc kubenswrapper[4755]: I0202 22:36:02.068839 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:36:02 crc kubenswrapper[4755]: E0202 22:36:02.069027 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:36:02 crc kubenswrapper[4755]: E0202 22:36:02.069433 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:36:03 crc kubenswrapper[4755]: I0202 22:36:03.068774 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:36:03 crc kubenswrapper[4755]: E0202 22:36:03.068962 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:36:04 crc kubenswrapper[4755]: I0202 22:36:04.067839 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:36:04 crc kubenswrapper[4755]: I0202 22:36:04.067857 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:36:04 crc kubenswrapper[4755]: I0202 22:36:04.067997 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:36:04 crc kubenswrapper[4755]: E0202 22:36:04.068230 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:36:04 crc kubenswrapper[4755]: E0202 22:36:04.068387 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:36:04 crc kubenswrapper[4755]: E0202 22:36:04.068547 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:36:05 crc kubenswrapper[4755]: I0202 22:36:05.068088 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:36:05 crc kubenswrapper[4755]: E0202 22:36:05.070830 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:36:05 crc kubenswrapper[4755]: I0202 22:36:05.071983 4755 scope.go:117] "RemoveContainer" containerID="23a5422c202fbce56b8c0b11d877cadff84502f56d66f5ce94b79164ff61417f" Feb 02 22:36:05 crc kubenswrapper[4755]: E0202 22:36:05.072306 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4mblb_openshift-ovn-kubernetes(ae78d89e-7970-49df-8839-b1b6d7de4ec1)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" Feb 02 22:36:05 crc kubenswrapper[4755]: E0202 22:36:05.077176 4755 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Feb 02 22:36:05 crc kubenswrapper[4755]: I0202 22:36:05.095654 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=10.095628043 podStartE2EDuration="10.095628043s" podCreationTimestamp="2026-02-02 22:35:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:05.09446466 +0000 UTC m=+120.785685046" watchObservedRunningTime="2026-02-02 22:36:05.095628043 +0000 UTC m=+120.786848409" Feb 02 22:36:05 crc kubenswrapper[4755]: E0202 22:36:05.162180 4755 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
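All of the NetworkReady=false noise above traces back to one condition: the container runtime finds no CNI configuration under /etc/kubernetes/cni/net.d/ until the network provider (ovn-kubernetes here) writes one. Below is a minimal Go sketch of that kind of check; the directory comes straight from the error message, while the .conf/.conflist/.json extension set follows common libcni convention and is an assumption, not kubelet's verbatim logic.

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        // Directory named in the recurring error message above.
        dir := "/etc/kubernetes/cni/net.d"
        entries, err := os.ReadDir(dir)
        if err != nil {
            fmt.Printf("cannot read %s: %v\n", dir, err)
            return
        }
        var found []string
        for _, e := range entries {
            // Assumed extension set, per common libcni convention.
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                found = append(found, e.Name())
            }
        }
        if len(found) == 0 {
            // The state the kubelet keeps reporting: NetworkReady=false
            // until the network provider drops a config file here.
            fmt.Println("no CNI configuration file found; network plugin not ready")
            return
        }
        fmt.Println("CNI configs:", found)
    }

Once ovnkube-node goes ready (22:36:33 below), the provider can write its config and the per-pod "network is not ready" sync errors stop.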
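Two of the entries above also show the restart throttling at work: kube-multus is told "back-off 10s" after its first crash and ovnkube-controller "back-off 40s" after repeated ones. That is consistent with kubelet's doubling CrashLoopBackOff (10s base, 5m cap); the sketch below reproduces that shape under those assumed constants rather than quoting kubelet's actual code.

    package main

    import (
        "fmt"
        "time"
    )

    // backoff returns the assumed CrashLoopBackOff delay after the n-th
    // failed restart: 10s base, doubling, capped at 5 minutes.
    func backoff(restarts int) time.Duration {
        d := 10 * time.Second
        for i := 1; i < restarts; i++ {
            d *= 2
            if d >= 5*time.Minute {
                return 5 * time.Minute
            }
        }
        return d
    }

    func main() {
        for n := 1; n <= 6; n++ {
            fmt.Printf("restart %d -> back-off %v\n", n, backoff(n))
        }
        // restart 1 -> 10s and restart 3 -> 40s match the two
        // back-off values visible in the log above.
    }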
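The pod_startup_latency_tracker entries are internally consistent: podStartE2EDuration is exactly watchObservedRunningTime minus podCreationTimestamp (22:36:05.095628043 − 22:35:55 = 10.095628043s for kube-controller-manager-crc above, and likewise 85.085656778s for the cluster-version-operator pod earlier). A few lines of Go verify the arithmetic using the timestamp format the log itself prints:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // Layout matching the log's "2026-02-02 22:35:55 +0000 UTC" form;
        // the fractional seconds are optional in Go's parser.
        const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
        created, err := time.Parse(layout, "2026-02-02 22:35:55 +0000 UTC")
        if err != nil {
            panic(err)
        }
        observed, err := time.Parse(layout, "2026-02-02 22:36:05.095628043 +0000 UTC")
        if err != nil {
            panic(err)
        }
        // Prints 10.095628043s, matching podStartE2EDuration in the entry above.
        fmt.Println(observed.Sub(created))
    }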
Feb 02 22:36:05 crc kubenswrapper[4755]: I0202 22:36:05.806210 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5fdlw_c206b6fd-200d-47ea-88a5-453f3093c749/kube-multus/1.log" Feb 02 22:36:05 crc kubenswrapper[4755]: I0202 22:36:05.807188 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5fdlw_c206b6fd-200d-47ea-88a5-453f3093c749/kube-multus/0.log" Feb 02 22:36:05 crc kubenswrapper[4755]: I0202 22:36:05.807252 4755 generic.go:334] "Generic (PLEG): container finished" podID="c206b6fd-200d-47ea-88a5-453f3093c749" containerID="9261f8876de2e09d0918c8008e1c4bde12d29318d43a80e47aa7d6e6cfc35b2b" exitCode=1 Feb 02 22:36:05 crc kubenswrapper[4755]: I0202 22:36:05.807294 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5fdlw" event={"ID":"c206b6fd-200d-47ea-88a5-453f3093c749","Type":"ContainerDied","Data":"9261f8876de2e09d0918c8008e1c4bde12d29318d43a80e47aa7d6e6cfc35b2b"} Feb 02 22:36:05 crc kubenswrapper[4755]: I0202 22:36:05.807339 4755 scope.go:117] "RemoveContainer" containerID="fb367724964bdc550b84e97b13842fb51de38ba7ae7e00b3708b17396d3d5575" Feb 02 22:36:05 crc kubenswrapper[4755]: I0202 22:36:05.807855 4755 scope.go:117] "RemoveContainer" containerID="9261f8876de2e09d0918c8008e1c4bde12d29318d43a80e47aa7d6e6cfc35b2b" Feb 02 22:36:05 crc kubenswrapper[4755]: E0202 22:36:05.808102 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-5fdlw_openshift-multus(c206b6fd-200d-47ea-88a5-453f3093c749)\"" pod="openshift-multus/multus-5fdlw" podUID="c206b6fd-200d-47ea-88a5-453f3093c749" Feb 02 22:36:06 crc kubenswrapper[4755]: I0202 22:36:06.068801 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:36:06 crc kubenswrapper[4755]: I0202 22:36:06.068854 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:36:06 crc kubenswrapper[4755]: E0202 22:36:06.070047 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:36:06 crc kubenswrapper[4755]: E0202 22:36:06.070192 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:36:06 crc kubenswrapper[4755]: I0202 22:36:06.068910 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:36:06 crc kubenswrapper[4755]: E0202 22:36:06.070410 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:36:06 crc kubenswrapper[4755]: I0202 22:36:06.814093 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5fdlw_c206b6fd-200d-47ea-88a5-453f3093c749/kube-multus/1.log" Feb 02 22:36:07 crc kubenswrapper[4755]: I0202 22:36:07.068656 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:36:07 crc kubenswrapper[4755]: E0202 22:36:07.068902 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:36:08 crc kubenswrapper[4755]: I0202 22:36:08.068191 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:36:08 crc kubenswrapper[4755]: I0202 22:36:08.068244 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:36:08 crc kubenswrapper[4755]: I0202 22:36:08.068205 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:36:08 crc kubenswrapper[4755]: E0202 22:36:08.068363 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:36:08 crc kubenswrapper[4755]: E0202 22:36:08.068529 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:36:08 crc kubenswrapper[4755]: E0202 22:36:08.068665 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:36:09 crc kubenswrapper[4755]: I0202 22:36:09.068572 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:36:09 crc kubenswrapper[4755]: E0202 22:36:09.068819 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:36:10 crc kubenswrapper[4755]: I0202 22:36:10.067998 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:36:10 crc kubenswrapper[4755]: I0202 22:36:10.068096 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:36:10 crc kubenswrapper[4755]: I0202 22:36:10.068600 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:36:10 crc kubenswrapper[4755]: E0202 22:36:10.068831 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:36:10 crc kubenswrapper[4755]: E0202 22:36:10.068952 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:36:10 crc kubenswrapper[4755]: E0202 22:36:10.069152 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:36:10 crc kubenswrapper[4755]: E0202 22:36:10.163389 4755 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 02 22:36:11 crc kubenswrapper[4755]: I0202 22:36:11.068558 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:36:11 crc kubenswrapper[4755]: E0202 22:36:11.068713 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:36:12 crc kubenswrapper[4755]: I0202 22:36:12.068792 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:36:12 crc kubenswrapper[4755]: I0202 22:36:12.068787 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:36:12 crc kubenswrapper[4755]: E0202 22:36:12.068966 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:36:12 crc kubenswrapper[4755]: I0202 22:36:12.068815 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:36:12 crc kubenswrapper[4755]: E0202 22:36:12.069095 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:36:12 crc kubenswrapper[4755]: E0202 22:36:12.069255 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:36:13 crc kubenswrapper[4755]: I0202 22:36:13.068638 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:36:13 crc kubenswrapper[4755]: E0202 22:36:13.068972 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:36:14 crc kubenswrapper[4755]: I0202 22:36:14.068467 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:36:14 crc kubenswrapper[4755]: I0202 22:36:14.068507 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:36:14 crc kubenswrapper[4755]: E0202 22:36:14.068680 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:36:14 crc kubenswrapper[4755]: E0202 22:36:14.068858 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:36:14 crc kubenswrapper[4755]: I0202 22:36:14.069194 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:36:14 crc kubenswrapper[4755]: E0202 22:36:14.069397 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:36:15 crc kubenswrapper[4755]: I0202 22:36:15.068177 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:36:15 crc kubenswrapper[4755]: E0202 22:36:15.070258 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:36:15 crc kubenswrapper[4755]: E0202 22:36:15.164037 4755 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 02 22:36:16 crc kubenswrapper[4755]: I0202 22:36:16.068414 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:36:16 crc kubenswrapper[4755]: I0202 22:36:16.068546 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:36:16 crc kubenswrapper[4755]: I0202 22:36:16.068414 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:36:16 crc kubenswrapper[4755]: E0202 22:36:16.068595 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:36:16 crc kubenswrapper[4755]: E0202 22:36:16.068763 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:36:16 crc kubenswrapper[4755]: E0202 22:36:16.068987 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:36:17 crc kubenswrapper[4755]: I0202 22:36:17.068904 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:36:17 crc kubenswrapper[4755]: E0202 22:36:17.069099 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:36:18 crc kubenswrapper[4755]: I0202 22:36:18.068368 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:36:18 crc kubenswrapper[4755]: I0202 22:36:18.068415 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:36:18 crc kubenswrapper[4755]: I0202 22:36:18.068404 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:36:18 crc kubenswrapper[4755]: E0202 22:36:18.068547 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:36:18 crc kubenswrapper[4755]: E0202 22:36:18.068698 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:36:18 crc kubenswrapper[4755]: E0202 22:36:18.068891 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:36:18 crc kubenswrapper[4755]: I0202 22:36:18.070527 4755 scope.go:117] "RemoveContainer" containerID="23a5422c202fbce56b8c0b11d877cadff84502f56d66f5ce94b79164ff61417f" Feb 02 22:36:18 crc kubenswrapper[4755]: I0202 22:36:18.860512 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4mblb_ae78d89e-7970-49df-8839-b1b6d7de4ec1/ovnkube-controller/3.log" Feb 02 22:36:18 crc kubenswrapper[4755]: I0202 22:36:18.864577 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerStarted","Data":"aaad222fc241dbf84cc91f20beb39ee65aafdf28e0738184def5d58b69e05917"} Feb 02 22:36:18 crc kubenswrapper[4755]: I0202 22:36:18.865259 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:36:18 crc kubenswrapper[4755]: I0202 22:36:18.924684 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" podStartSLOduration=107.924657059 podStartE2EDuration="1m47.924657059s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:18.923505026 +0000 UTC m=+134.614725382" watchObservedRunningTime="2026-02-02 22:36:18.924657059 +0000 UTC m=+134.615877435" Feb 02 22:36:18 crc kubenswrapper[4755]: I0202 22:36:18.931125 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-k8tml"] Feb 02 22:36:18 crc kubenswrapper[4755]: I0202 22:36:18.931255 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:36:18 crc kubenswrapper[4755]: E0202 22:36:18.931393 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:36:19 crc kubenswrapper[4755]: I0202 22:36:19.069883 4755 scope.go:117] "RemoveContainer" containerID="9261f8876de2e09d0918c8008e1c4bde12d29318d43a80e47aa7d6e6cfc35b2b" Feb 02 22:36:19 crc kubenswrapper[4755]: I0202 22:36:19.870644 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5fdlw_c206b6fd-200d-47ea-88a5-453f3093c749/kube-multus/1.log" Feb 02 22:36:19 crc kubenswrapper[4755]: I0202 22:36:19.871015 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5fdlw" event={"ID":"c206b6fd-200d-47ea-88a5-453f3093c749","Type":"ContainerStarted","Data":"0058d3561a900f271e03fea16adb2cfa9d0fe60aa9931b488e9d55c739895d14"} Feb 02 22:36:20 crc kubenswrapper[4755]: I0202 22:36:20.068549 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:36:20 crc kubenswrapper[4755]: I0202 22:36:20.068623 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:36:20 crc kubenswrapper[4755]: I0202 22:36:20.068699 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:36:20 crc kubenswrapper[4755]: E0202 22:36:20.069097 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:36:20 crc kubenswrapper[4755]: E0202 22:36:20.069363 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:36:20 crc kubenswrapper[4755]: E0202 22:36:20.069560 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:36:20 crc kubenswrapper[4755]: E0202 22:36:20.165492 4755 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 02 22:36:21 crc kubenswrapper[4755]: I0202 22:36:21.068779 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:36:21 crc kubenswrapper[4755]: E0202 22:36:21.068997 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:36:22 crc kubenswrapper[4755]: I0202 22:36:22.068836 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:36:22 crc kubenswrapper[4755]: I0202 22:36:22.068972 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:36:22 crc kubenswrapper[4755]: E0202 22:36:22.069072 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:36:22 crc kubenswrapper[4755]: I0202 22:36:22.068839 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:36:22 crc kubenswrapper[4755]: E0202 22:36:22.069167 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:36:22 crc kubenswrapper[4755]: E0202 22:36:22.069295 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:36:23 crc kubenswrapper[4755]: I0202 22:36:23.068461 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:36:23 crc kubenswrapper[4755]: E0202 22:36:23.068705 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:36:24 crc kubenswrapper[4755]: I0202 22:36:24.068646 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:36:24 crc kubenswrapper[4755]: I0202 22:36:24.068718 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:36:24 crc kubenswrapper[4755]: I0202 22:36:24.068677 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:36:24 crc kubenswrapper[4755]: E0202 22:36:24.068864 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 22:36:24 crc kubenswrapper[4755]: E0202 22:36:24.069246 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 22:36:24 crc kubenswrapper[4755]: E0202 22:36:24.069431 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 22:36:25 crc kubenswrapper[4755]: I0202 22:36:25.068823 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:36:25 crc kubenswrapper[4755]: E0202 22:36:25.070593 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-k8tml" podUID="3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923" Feb 02 22:36:26 crc kubenswrapper[4755]: I0202 22:36:26.068069 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:36:26 crc kubenswrapper[4755]: I0202 22:36:26.068215 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:36:26 crc kubenswrapper[4755]: I0202 22:36:26.068274 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:36:26 crc kubenswrapper[4755]: I0202 22:36:26.071525 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Feb 02 22:36:26 crc kubenswrapper[4755]: I0202 22:36:26.072080 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Feb 02 22:36:26 crc kubenswrapper[4755]: I0202 22:36:26.072125 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Feb 02 22:36:26 crc kubenswrapper[4755]: I0202 22:36:26.072247 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Feb 02 22:36:27 crc kubenswrapper[4755]: I0202 22:36:27.068092 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml" Feb 02 22:36:27 crc kubenswrapper[4755]: I0202 22:36:27.070924 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Feb 02 22:36:27 crc kubenswrapper[4755]: I0202 22:36:27.073086 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.746340 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.844850 4755 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.890567 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-vcjbc"] Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.891267 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.892398 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-fh5rw"] Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.893170 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fh5rw" Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.894095 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97"] Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.894899 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97" Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.895864 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx"] Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.896784 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.896831 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.900614 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-s6v4j"]
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.903834 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-7qkqs"]
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.904285 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-7qkqs"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.904803 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-s6v4j"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.906720 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.908632 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lrgth"]
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.909369 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.909751 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.910363 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.914613 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.915101 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.915247 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.915451 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.915852 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.916069 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.916291 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.916406 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.917026 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.917343 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.917455 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.917525 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.917567 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.917675 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.917705 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-b5zvf"]
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.918258 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.918295 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-b5zvf"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.918414 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.918262 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-n987n"]
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.918871 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-n987n"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.918881 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.919656 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.919862 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.919973 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.919983 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.920031 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.920188 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.920252 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.920293 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.920352 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.920395 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.920495 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.920519 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.920589 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.920636 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.921658 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.921860 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.922211 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.922322 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.922423 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.927695 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.928109 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.928588 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.930573 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-76ppj"]
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.930626 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.931139 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-ttsv6"]
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.931524 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-ttsv6"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.931936 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.931986 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-76ppj"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.932103 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.935807 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dx54k"]
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.936282 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dx54k"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.936548 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qc97t"]
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.947407 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.947572 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qc97t"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.959550 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.961210 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.961325 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.961449 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.961488 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.961452 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.961654 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.961738 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.961746 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.961824 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.962038 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.962958 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.964391 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nlddn"]
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.965108 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bwhck"]
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.965563 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-s9mjc"]
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.966251 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fzwbz"]
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.966532 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.966676 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.966749 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ch6t8"]
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.966784 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.967527 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.968770 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.969235 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nlddn"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.969584 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bwhck"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.969599 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.969828 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.969972 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.970000 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-s9mjc"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.970072 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.970144 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.970223 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.970301 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.970871 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-sl4x8"]
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.971362 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-rwshx"]
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.971616 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-49pm9"]
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.971659 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.971900 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.972036 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.976813 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fzwbz"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.977064 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl4x8"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.977245 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-rwshx"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.978545 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.978965 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.980159 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.983173 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-5nv2v"]
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.984312 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-49pm9"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.988966 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990364 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/19316443-6a89-4993-b196-1de2bece6e84-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990390 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-serving-cert\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990409 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-etcd-serving-ca\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990425 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/19316443-6a89-4993-b196-1de2bece6e84-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990441 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-287x9\" (UniqueName: \"kubernetes.io/projected/fbc07c99-ae2c-459e-9731-ca524c8bfa08-kube-api-access-287x9\") pod \"route-controller-manager-6576b87f9c-m8cdx\" (UID: \"fbc07c99-ae2c-459e-9731-ca524c8bfa08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990458 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/19316443-6a89-4993-b196-1de2bece6e84-etcd-client\") pod \"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990473 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgxk9\" (UniqueName: \"kubernetes.io/projected/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-kube-api-access-fgxk9\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990488 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-etcd-client\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990513 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49503265-4d0e-484f-8832-38be62c19af0-config\") pod \"machine-approver-56656f9798-fh5rw\" (UID: \"49503265-4d0e-484f-8832-38be62c19af0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fh5rw"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990534 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-audit\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990549 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19316443-6a89-4993-b196-1de2bece6e84-serving-cert\") pod \"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990566 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbc07c99-ae2c-459e-9731-ca524c8bfa08-serving-cert\") pod \"route-controller-manager-6576b87f9c-m8cdx\" (UID: \"fbc07c99-ae2c-459e-9731-ca524c8bfa08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990582 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/49503265-4d0e-484f-8832-38be62c19af0-auth-proxy-config\") pod \"machine-approver-56656f9798-fh5rw\" (UID: \"49503265-4d0e-484f-8832-38be62c19af0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fh5rw"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990598 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-node-pullsecrets\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990613 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-image-import-ca\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990631 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jsntf\" (UniqueName: \"kubernetes.io/projected/49503265-4d0e-484f-8832-38be62c19af0-kube-api-access-jsntf\") pod \"machine-approver-56656f9798-fh5rw\" (UID: \"49503265-4d0e-484f-8832-38be62c19af0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fh5rw"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990654 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/49503265-4d0e-484f-8832-38be62c19af0-machine-approver-tls\") pod \"machine-approver-56656f9798-fh5rw\" (UID: \"49503265-4d0e-484f-8832-38be62c19af0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fh5rw"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990668 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-config\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990684 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbc07c99-ae2c-459e-9731-ca524c8bfa08-config\") pod \"route-controller-manager-6576b87f9c-m8cdx\" (UID: \"fbc07c99-ae2c-459e-9731-ca524c8bfa08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990709 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/19316443-6a89-4993-b196-1de2bece6e84-audit-policies\") pod \"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990740 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nw8r\" (UniqueName: \"kubernetes.io/projected/19316443-6a89-4993-b196-1de2bece6e84-kube-api-access-4nw8r\") pod \"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990761 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fbc07c99-ae2c-459e-9731-ca524c8bfa08-client-ca\") pod \"route-controller-manager-6576b87f9c-m8cdx\" (UID: \"fbc07c99-ae2c-459e-9731-ca524c8bfa08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990774 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-encryption-config\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990787 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/19316443-6a89-4993-b196-1de2bece6e84-audit-dir\") pod \"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990801 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-trusted-ca-bundle\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990813 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-audit-dir\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.990835 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/19316443-6a89-4993-b196-1de2bece6e84-encryption-config\") pod \"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97"
Feb 02 22:36:33 crc kubenswrapper[4755]: I0202 22:36:33.999111 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.001033 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-z7g7z"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.029653 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.029880 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.040172 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.042780 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.043446 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.044581 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.044833 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.045033 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.045214 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.045368 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.045516 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.045799 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.045946 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.046092 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.046956 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.047371 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.047404 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.047498 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.047566 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.047763 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.047902 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.048036 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.054539 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.055081 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h2gd9"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.055844 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.059017 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-z7g7z"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.058742 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.060490 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw2ht"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.058789 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.060596 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h2gd9"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.063205 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gv5dm"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.063574 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kc7j8"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.059130 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.063746 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gv5dm"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.059149 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.061143 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.071150 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-2x78p"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.073700 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-4n2db"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.076912 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.072335 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kc7j8"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.074287 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw2ht"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.073943 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-2x78p"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.078178 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hg7t4"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.078245 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4n2db"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.081010 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-dgrbx"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.081292 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-5hjgs"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.081501 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-dgrbx"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.081519 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hg7t4"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.082103 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501190-785d5"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.082225 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5hjgs"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.082392 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-69tbt"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.082490 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501190-785d5"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.082859 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-lwzgj"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.083177 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hnz5f"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.083191 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69tbt"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.083242 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-lwzgj"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.083662 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-lkd4r"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.083803 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.084283 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.084335 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-lkd4r"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.084564 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-s6v4j"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.084582 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-vdz28"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.084893 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.084942 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-vdz28"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.084905 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-vcjbc"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.085086 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.085102 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.085114 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lrgth"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.088379 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qc97t"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.091520 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-7qkqs"]
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.091869 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.091998 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbc07c99-ae2c-459e-9731-ca524c8bfa08-serving-cert\") pod \"route-controller-manager-6576b87f9c-m8cdx\" (UID: \"fbc07c99-ae2c-459e-9731-ca524c8bfa08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092026 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9deda20-3739-4585-a197-f51ce9a63b8c-config\") pod \"kube-controller-manager-operator-78b949d7b-nlddn\" (UID: \"b9deda20-3739-4585-a197-f51ce9a63b8c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nlddn"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092047 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d98f0d35-15ef-4d83-9c0b-104f44f4ae41-config\") pod \"kube-apiserver-operator-766d6c64bb-fzwbz\" (UID: \"d98f0d35-15ef-4d83-9c0b-104f44f4ae41\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fzwbz"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092064 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-oauth-serving-cert\") pod \"console-f9d7485db-rwshx\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " pod="openshift-console/console-f9d7485db-rwshx"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092080 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fl24t\" (UniqueName: \"kubernetes.io/projected/1713b1df-6fc2-4060-91b9-e631ad9b335f-kube-api-access-fl24t\") pod \"ingress-operator-5b745b69d9-sl4x8\" (UID: \"1713b1df-6fc2-4060-91b9-e631ad9b335f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl4x8"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092096 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092113 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092129 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcvcr\" (UniqueName: \"kubernetes.io/projected/79899b57-242d-45e0-8527-2af257b8a5b7-kube-api-access-wcvcr\") pod \"cluster-samples-operator-665b6dd947-qc97t\" (UID: \"79899b57-242d-45e0-8527-2af257b8a5b7\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qc97t"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092144 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1713b1df-6fc2-4060-91b9-e631ad9b335f-metrics-tls\") pod \"ingress-operator-5b745b69d9-sl4x8\" (UID: \"1713b1df-6fc2-4060-91b9-e631ad9b335f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl4x8"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092158 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-console-config\") pod \"console-f9d7485db-rwshx\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " pod="openshift-console/console-f9d7485db-rwshx"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092176 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/49503265-4d0e-484f-8832-38be62c19af0-auth-proxy-config\") pod \"machine-approver-56656f9798-fh5rw\" (UID: \"49503265-4d0e-484f-8832-38be62c19af0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fh5rw"
Feb 02 22:36:34 crc kubenswrapper[4755]: E0202 22:36:34.092208 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:38:36.092185868 +0000 UTC m=+271.783406194 (durationBeforeRetry 2m2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092240 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092275 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0a4a5bd4-3691-4bce-9266-3d3a05dae585-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-76ppj\" (UID: \"0a4a5bd4-3691-4bce-9266-3d3a05dae585\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-76ppj"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092294 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8mx8\" (UniqueName: \"kubernetes.io/projected/c61f342b-cd14-408f-8c6e-e65cee1ebb39-kube-api-access-l8mx8\") pod \"machine-api-operator-5694c8668f-s6v4j\" (UID: \"c61f342b-cd14-408f-8c6e-e65cee1ebb39\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s6v4j"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092310 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0a4a5bd4-3691-4bce-9266-3d3a05dae585-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-76ppj\" (UID: \"0a4a5bd4-3691-4bce-9266-3d3a05dae585\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-76ppj"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092335 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jsntf\" (UniqueName: \"kubernetes.io/projected/49503265-4d0e-484f-8832-38be62c19af0-kube-api-access-jsntf\") pod \"machine-approver-56656f9798-fh5rw\" (UID: \"49503265-4d0e-484f-8832-38be62c19af0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fh5rw"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092352 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-node-pullsecrets\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092369 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-image-import-ca\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092386 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f45sx\" (UniqueName: \"kubernetes.io/projected/a6f21874-ee8f-4718-b2ab-8b4a97543364-kube-api-access-f45sx\") pod \"downloads-7954f5f757-n987n\" (UID: \"a6f21874-ee8f-4718-b2ab-8b4a97543364\") " pod="openshift-console/downloads-7954f5f757-n987n"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092402 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4-etcd-client\") pod \"etcd-operator-b45778765-ttsv6\" (UID: \"82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ttsv6"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092418 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57k6c\" (UniqueName: \"kubernetes.io/projected/938066f1-bde4-4bbc-ad80-47379f6a66ff-kube-api-access-57k6c\") pod \"migrator-59844c95c7-z7g7z\" (UID: \"938066f1-bde4-4bbc-ad80-47379f6a66ff\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-z7g7z"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092448 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4-etcd-service-ca\") pod \"etcd-operator-b45778765-ttsv6\" (UID: \"82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ttsv6"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092466 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-config\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092483 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbc07c99-ae2c-459e-9731-ca524c8bfa08-config\") pod \"route-controller-manager-6576b87f9c-m8cdx\" (UID: \"fbc07c99-ae2c-459e-9731-ca524c8bfa08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092503 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/49503265-4d0e-484f-8832-38be62c19af0-machine-approver-tls\") pod \"machine-approver-56656f9798-fh5rw\" (UID: \"49503265-4d0e-484f-8832-38be62c19af0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fh5rw"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092520 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/21a3e3ee-831b-450a-b0da-13551b7353e4-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-bwhck\" (UID: \"21a3e3ee-831b-450a-b0da-13551b7353e4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bwhck"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092538 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ttfq\" (UniqueName: \"kubernetes.io/projected/5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2-kube-api-access-8ttfq\") pod \"console-operator-58897d9998-b5zvf\" (UID: \"5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2\") " pod="openshift-console-operator/console-operator-58897d9998-b5zvf"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092554 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1713b1df-6fc2-4060-91b9-e631ad9b335f-bound-sa-token\") pod \"ingress-operator-5b745b69d9-sl4x8\" (UID: \"1713b1df-6fc2-4060-91b9-e631ad9b335f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl4x8"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092570 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4xfm\" (UniqueName: \"kubernetes.io/projected/0124b915-2ac4-4be7-b356-bf78a8295d9d-kube-api-access-q4xfm\") pod \"console-f9d7485db-rwshx\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " pod="openshift-console/console-f9d7485db-rwshx"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092589 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092618 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092634 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092651 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9jvr\" (UniqueName: \"kubernetes.io/projected/21bfe782-2848-4171-8919-e1ce96150a09-kube-api-access-t9jvr\") pod \"openshift-apiserver-operator-796bbdcf4f-dx54k\" (UID: \"21bfe782-2848-4171-8919-e1ce96150a09\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dx54k"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092668 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/c61f342b-cd14-408f-8c6e-e65cee1ebb39-images\") pod \"machine-api-operator-5694c8668f-s6v4j\" (UID: \"c61f342b-cd14-408f-8c6e-e65cee1ebb39\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s6v4j"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092681 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4-config\") pod \"etcd-operator-b45778765-ttsv6\" (UID: \"82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ttsv6"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092698 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09e605cd-967a-462f-8fad-1cf16ef64351-config\") pod \"controller-manager-879f6c89f-lrgth\" (UID: \"09e605cd-967a-462f-8fad-1cf16ef64351\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092713 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/09e605cd-967a-462f-8fad-1cf16ef64351-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-lrgth\" (UID: \"09e605cd-967a-462f-8fad-1cf16ef64351\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092747 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-service-ca\") pod \"console-f9d7485db-rwshx\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " pod="openshift-console/console-f9d7485db-rwshx"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092766 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/c61f342b-cd14-408f-8c6e-e65cee1ebb39-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-s6v4j\" (UID: \"c61f342b-cd14-408f-8c6e-e65cee1ebb39\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s6v4j"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092780 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092806 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/19316443-6a89-4993-b196-1de2bece6e84-audit-policies\") pod \"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092821 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2-config\") pod \"console-operator-58897d9998-b5zvf\" (UID: \"5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2\") " pod="openshift-console-operator/console-operator-58897d9998-b5zvf"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092834 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/09e605cd-967a-462f-8fad-1cf16ef64351-client-ca\") pod \"controller-manager-879f6c89f-lrgth\" (UID: \"09e605cd-967a-462f-8fad-1cf16ef64351\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092849 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21a3e3ee-831b-450a-b0da-13551b7353e4-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-bwhck\" (UID: \"21a3e3ee-831b-450a-b0da-13551b7353e4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bwhck"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092863 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4730daad-9b5f-4e27-a9d2-8a989d2c40e8-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-7qkqs\" (UID: \"4730daad-9b5f-4e27-a9d2-8a989d2c40e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7qkqs"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092878 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0124b915-2ac4-4be7-b356-bf78a8295d9d-console-serving-cert\") pod \"console-f9d7485db-rwshx\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " pod="openshift-console/console-f9d7485db-rwshx"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092889 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/49503265-4d0e-484f-8832-38be62c19af0-auth-proxy-config\") pod \"machine-approver-56656f9798-fh5rw\" (UID: \"49503265-4d0e-484f-8832-38be62c19af0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fh5rw"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092896 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092915 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nw8r\" (UniqueName: \"kubernetes.io/projected/19316443-6a89-4993-b196-1de2bece6e84-kube-api-access-4nw8r\") pod \"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092930 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6vdn\" (UniqueName: \"kubernetes.io/projected/0a4a5bd4-3691-4bce-9266-3d3a05dae585-kube-api-access-p6vdn\") pod \"cluster-image-registry-operator-dc59b4c8b-76ppj\" (UID: \"0a4a5bd4-3691-4bce-9266-3d3a05dae585\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-76ppj"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092951 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-encryption-config\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092967 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/19316443-6a89-4993-b196-1de2bece6e84-audit-dir\") pod \"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092982 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fbc07c99-ae2c-459e-9731-ca524c8bfa08-client-ca\") pod \"route-controller-manager-6576b87f9c-m8cdx\" (UID: \"fbc07c99-ae2c-459e-9731-ca524c8bfa08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.092999 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/21bfe782-2848-4171-8919-e1ce96150a09-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-dx54k\" (UID: \"21bfe782-2848-4171-8919-e1ce96150a09\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dx54k"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093014 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09e605cd-967a-462f-8fad-1cf16ef64351-serving-cert\") pod \"controller-manager-879f6c89f-lrgth\" (UID: \"09e605cd-967a-462f-8fad-1cf16ef64351\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093031 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76hsw\" (UniqueName: \"kubernetes.io/projected/21a3e3ee-831b-450a-b0da-13551b7353e4-kube-api-access-76hsw\") pod \"openshift-controller-manager-operator-756b6f6bc6-bwhck\" (UID: \"21a3e3ee-831b-450a-b0da-13551b7353e4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bwhck"
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093049
4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-trusted-ca-bundle\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093063 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-audit-dir\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093078 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/19316443-6a89-4993-b196-1de2bece6e84-encryption-config\") pod \"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093093 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093108 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0124b915-2ac4-4be7-b356-bf78a8295d9d-console-oauth-config\") pod \"console-f9d7485db-rwshx\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " pod="openshift-console/console-f9d7485db-rwshx" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093132 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9deda20-3739-4585-a197-f51ce9a63b8c-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-nlddn\" (UID: \"b9deda20-3739-4585-a197-f51ce9a63b8c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nlddn" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093149 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-serving-cert\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093164 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/19316443-6a89-4993-b196-1de2bece6e84-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093180 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093194 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4-etcd-ca\") pod \"etcd-operator-b45778765-ttsv6\" (UID: \"82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ttsv6" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093209 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/19316443-6a89-4993-b196-1de2bece6e84-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093224 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-287x9\" (UniqueName: \"kubernetes.io/projected/fbc07c99-ae2c-459e-9731-ca524c8bfa08-kube-api-access-287x9\") pod \"route-controller-manager-6576b87f9c-m8cdx\" (UID: \"fbc07c99-ae2c-459e-9731-ca524c8bfa08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093243 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-etcd-serving-ca\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093261 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4730daad-9b5f-4e27-a9d2-8a989d2c40e8-serving-cert\") pod \"authentication-operator-69f744f599-7qkqs\" (UID: \"4730daad-9b5f-4e27-a9d2-8a989d2c40e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7qkqs" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093277 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d98f0d35-15ef-4d83-9c0b-104f44f4ae41-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-fzwbz\" (UID: \"d98f0d35-15ef-4d83-9c0b-104f44f4ae41\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fzwbz" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093292 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093310 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/19316443-6a89-4993-b196-1de2bece6e84-etcd-client\") pod 
\"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093324 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2-serving-cert\") pod \"console-operator-58897d9998-b5zvf\" (UID: \"5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2\") " pod="openshift-console-operator/console-operator-58897d9998-b5zvf" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093339 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1812beb5-4070-4649-8b5b-e78e5257c3dc-metrics-tls\") pod \"dns-operator-744455d44c-s9mjc\" (UID: \"1812beb5-4070-4649-8b5b-e78e5257c3dc\") " pod="openshift-dns-operator/dns-operator-744455d44c-s9mjc" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093354 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21bfe782-2848-4171-8919-e1ce96150a09-config\") pod \"openshift-apiserver-operator-796bbdcf4f-dx54k\" (UID: \"21bfe782-2848-4171-8919-e1ce96150a09\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dx54k" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093371 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7b8d098a-b26f-4de7-94a4-5ad6e2cd60d7-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-49pm9\" (UID: \"7b8d098a-b26f-4de7-94a4-5ad6e2cd60d7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-49pm9" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093386 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7q8nk\" (UniqueName: \"kubernetes.io/projected/1812beb5-4070-4649-8b5b-e78e5257c3dc-kube-api-access-7q8nk\") pod \"dns-operator-744455d44c-s9mjc\" (UID: \"1812beb5-4070-4649-8b5b-e78e5257c3dc\") " pod="openshift-dns-operator/dns-operator-744455d44c-s9mjc" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093401 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c61f342b-cd14-408f-8c6e-e65cee1ebb39-config\") pod \"machine-api-operator-5694c8668f-s6v4j\" (UID: \"c61f342b-cd14-408f-8c6e-e65cee1ebb39\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s6v4j" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093415 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4730daad-9b5f-4e27-a9d2-8a989d2c40e8-service-ca-bundle\") pod \"authentication-operator-69f744f599-7qkqs\" (UID: \"4730daad-9b5f-4e27-a9d2-8a989d2c40e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7qkqs" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093429 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rj4hs\" (UniqueName: \"kubernetes.io/projected/4730daad-9b5f-4e27-a9d2-8a989d2c40e8-kube-api-access-rj4hs\") pod 
\"authentication-operator-69f744f599-7qkqs\" (UID: \"4730daad-9b5f-4e27-a9d2-8a989d2c40e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7qkqs" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093443 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4-serving-cert\") pod \"etcd-operator-b45778765-ttsv6\" (UID: \"82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ttsv6" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093458 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b8d098a-b26f-4de7-94a4-5ad6e2cd60d7-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-49pm9\" (UID: \"7b8d098a-b26f-4de7-94a4-5ad6e2cd60d7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-49pm9" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093473 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2-trusted-ca\") pod \"console-operator-58897d9998-b5zvf\" (UID: \"5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2\") " pod="openshift-console-operator/console-operator-58897d9998-b5zvf" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093490 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8868\" (UniqueName: \"kubernetes.io/projected/09e605cd-967a-462f-8fad-1cf16ef64351-kube-api-access-p8868\") pod \"controller-manager-879f6c89f-lrgth\" (UID: \"09e605cd-967a-462f-8fad-1cf16ef64351\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093504 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b9deda20-3739-4585-a197-f51ce9a63b8c-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-nlddn\" (UID: \"b9deda20-3739-4585-a197-f51ce9a63b8c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nlddn" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093520 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093536 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-audit-dir\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093549 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-trusted-ca-bundle\") pod \"console-f9d7485db-rwshx\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " pod="openshift-console/console-f9d7485db-rwshx" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093563 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-audit-policies\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093577 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b8d098a-b26f-4de7-94a4-5ad6e2cd60d7-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-49pm9\" (UID: \"7b8d098a-b26f-4de7-94a4-5ad6e2cd60d7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-49pm9" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093596 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgxk9\" (UniqueName: \"kubernetes.io/projected/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-kube-api-access-fgxk9\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093613 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4730daad-9b5f-4e27-a9d2-8a989d2c40e8-config\") pod \"authentication-operator-69f744f599-7qkqs\" (UID: \"4730daad-9b5f-4e27-a9d2-8a989d2c40e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7qkqs" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093634 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093650 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-etcd-client\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093665 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0a4a5bd4-3691-4bce-9266-3d3a05dae585-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-76ppj\" (UID: \"0a4a5bd4-3691-4bce-9266-3d3a05dae585\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-76ppj" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093678 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1713b1df-6fc2-4060-91b9-e631ad9b335f-trusted-ca\") pod \"ingress-operator-5b745b69d9-sl4x8\" (UID: 
\"1713b1df-6fc2-4060-91b9-e631ad9b335f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl4x8" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093696 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093712 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49503265-4d0e-484f-8832-38be62c19af0-config\") pod \"machine-approver-56656f9798-fh5rw\" (UID: \"49503265-4d0e-484f-8832-38be62c19af0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fh5rw" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093740 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/79899b57-242d-45e0-8527-2af257b8a5b7-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-qc97t\" (UID: \"79899b57-242d-45e0-8527-2af257b8a5b7\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qc97t" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093756 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093771 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wd5kf\" (UniqueName: \"kubernetes.io/projected/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-kube-api-access-wd5kf\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093785 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t24jr\" (UniqueName: \"kubernetes.io/projected/82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4-kube-api-access-t24jr\") pod \"etcd-operator-b45778765-ttsv6\" (UID: \"82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ttsv6" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093815 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-audit\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093850 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19316443-6a89-4993-b196-1de2bece6e84-serving-cert\") pod \"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97" Feb 02 22:36:34 crc 
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.093867 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d98f0d35-15ef-4d83-9c0b-104f44f4ae41-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-fzwbz\" (UID: \"d98f0d35-15ef-4d83-9c0b-104f44f4ae41\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fzwbz" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.094597 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/19316443-6a89-4993-b196-1de2bece6e84-audit-policies\") pod \"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.097015 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.098254 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-image-import-ca\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.098410 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-node-pullsecrets\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.098616 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbc07c99-ae2c-459e-9731-ca524c8bfa08-serving-cert\") pod \"route-controller-manager-6576b87f9c-m8cdx\" (UID: \"fbc07c99-ae2c-459e-9731-ca524c8bfa08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.098660 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-config\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.099284 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.099539 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbc07c99-ae2c-459e-9731-ca524c8bfa08-config\") pod \"route-controller-manager-6576b87f9c-m8cdx\" (UID: \"fbc07c99-ae2c-459e-9731-ca524c8bfa08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.099637 4755 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49503265-4d0e-484f-8832-38be62c19af0-config\") pod \"machine-approver-56656f9798-fh5rw\" (UID: \"49503265-4d0e-484f-8832-38be62c19af0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fh5rw" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.099887 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/19316443-6a89-4993-b196-1de2bece6e84-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.099948 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-audit-dir\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.100123 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-audit\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.100386 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.100395 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-trusted-ca-bundle\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.100426 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/19316443-6a89-4993-b196-1de2bece6e84-audit-dir\") pod \"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.100845 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-encryption-config\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.100974 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fbc07c99-ae2c-459e-9731-ca524c8bfa08-client-ca\") pod \"route-controller-manager-6576b87f9c-m8cdx\" (UID: \"fbc07c99-ae2c-459e-9731-ca524c8bfa08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.101334 4755 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-etcd-serving-ca\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.101362 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.102899 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/19316443-6a89-4993-b196-1de2bece6e84-encryption-config\") pod \"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.105363 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19316443-6a89-4993-b196-1de2bece6e84-serving-cert\") pod \"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.105584 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/19316443-6a89-4993-b196-1de2bece6e84-etcd-client\") pod \"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.106032 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-etcd-client\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.111675 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-serving-cert\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.114127 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/19316443-6a89-4993-b196-1de2bece6e84-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.114574 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/49503265-4d0e-484f-8832-38be62c19af0-machine-approver-tls\") pod \"machine-approver-56656f9798-fh5rw\" (UID: \"49503265-4d0e-484f-8832-38be62c19af0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fh5rw" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.116257 4755 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw2ht"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.126580 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.128586 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.129140 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-ttsv6"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.132420 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dx54k"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.133758 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.134215 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kc7j8"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.136102 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bwhck"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.137376 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-s9mjc"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.141011 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nlddn"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.145790 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-5nv2v"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.150768 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-49pm9"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.150805 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hg7t4"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.153292 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h2gd9"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.155901 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-n987n"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.156506 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.159186 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-dgrbx"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.160585 4755 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fzwbz"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.161974 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-sl4x8"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.162949 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-b5zvf"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.164028 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-z7g7z"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.165712 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-4n2db"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.166774 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-vdz28"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.167861 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-5hjgs"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.169443 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-76ppj"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.171167 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gv5dm"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.173039 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.173854 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-4dnjt"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.174685 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-4dnjt" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.175027 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ch6t8"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.176105 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-rwshx"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.177569 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501190-785d5"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.178681 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.179776 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-lwzgj"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.180832 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-4dnjt"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.181896 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hnz5f"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.182940 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-69tbt"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.183920 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-lkd4r"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.184964 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-6t4wf"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.185507 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6t4wf" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.186750 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-gn7kw"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.187718 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-gn7kw" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.187975 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.188167 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-gn7kw"] Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.193633 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.194467 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/21bfe782-2848-4171-8919-e1ce96150a09-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-dx54k\" (UID: \"21bfe782-2848-4171-8919-e1ce96150a09\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dx54k" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.194497 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09e605cd-967a-462f-8fad-1cf16ef64351-serving-cert\") pod \"controller-manager-879f6c89f-lrgth\" (UID: \"09e605cd-967a-462f-8fad-1cf16ef64351\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.194520 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76hsw\" (UniqueName: \"kubernetes.io/projected/21a3e3ee-831b-450a-b0da-13551b7353e4-kube-api-access-76hsw\") pod \"openshift-controller-manager-operator-756b6f6bc6-bwhck\" (UID: \"21a3e3ee-831b-450a-b0da-13551b7353e4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bwhck" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.194540 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.194558 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0124b915-2ac4-4be7-b356-bf78a8295d9d-console-oauth-config\") pod \"console-f9d7485db-rwshx\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " pod="openshift-console/console-f9d7485db-rwshx" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.194576 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9deda20-3739-4585-a197-f51ce9a63b8c-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-nlddn\" (UID: \"b9deda20-3739-4585-a197-f51ce9a63b8c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nlddn" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.194593 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4-etcd-ca\") pod \"etcd-operator-b45778765-ttsv6\" (UID: \"82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ttsv6" Feb 02 22:36:34 crc 
Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.194610 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.194635 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4730daad-9b5f-4e27-a9d2-8a989d2c40e8-serving-cert\") pod \"authentication-operator-69f744f599-7qkqs\" (UID: \"4730daad-9b5f-4e27-a9d2-8a989d2c40e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7qkqs" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.194650 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d98f0d35-15ef-4d83-9c0b-104f44f4ae41-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-fzwbz\" (UID: \"d98f0d35-15ef-4d83-9c0b-104f44f4ae41\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fzwbz" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.194668 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21bfe782-2848-4171-8919-e1ce96150a09-config\") pod \"openshift-apiserver-operator-796bbdcf4f-dx54k\" (UID: \"21bfe782-2848-4171-8919-e1ce96150a09\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dx54k" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.194685 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.194701 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2-serving-cert\") pod \"console-operator-58897d9998-b5zvf\" (UID: \"5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2\") " pod="openshift-console-operator/console-operator-58897d9998-b5zvf" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.194717 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1812beb5-4070-4649-8b5b-e78e5257c3dc-metrics-tls\") pod \"dns-operator-744455d44c-s9mjc\" (UID: \"1812beb5-4070-4649-8b5b-e78e5257c3dc\") " pod="openshift-dns-operator/dns-operator-744455d44c-s9mjc" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.194748 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rj4hs\" (UniqueName: \"kubernetes.io/projected/4730daad-9b5f-4e27-a9d2-8a989d2c40e8-kube-api-access-rj4hs\") pod \"authentication-operator-69f744f599-7qkqs\" (UID: \"4730daad-9b5f-4e27-a9d2-8a989d2c40e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7qkqs" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.195406 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/21bfe782-2848-4171-8919-e1ce96150a09-config\") pod \"openshift-apiserver-operator-796bbdcf4f-dx54k\" (UID: \"21bfe782-2848-4171-8919-e1ce96150a09\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dx54k" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.195717 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7b8d098a-b26f-4de7-94a4-5ad6e2cd60d7-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-49pm9\" (UID: \"7b8d098a-b26f-4de7-94a4-5ad6e2cd60d7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-49pm9" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.195755 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7q8nk\" (UniqueName: \"kubernetes.io/projected/1812beb5-4070-4649-8b5b-e78e5257c3dc-kube-api-access-7q8nk\") pod \"dns-operator-744455d44c-s9mjc\" (UID: \"1812beb5-4070-4649-8b5b-e78e5257c3dc\") " pod="openshift-dns-operator/dns-operator-744455d44c-s9mjc" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.195904 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c61f342b-cd14-408f-8c6e-e65cee1ebb39-config\") pod \"machine-api-operator-5694c8668f-s6v4j\" (UID: \"c61f342b-cd14-408f-8c6e-e65cee1ebb39\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s6v4j" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.195928 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4730daad-9b5f-4e27-a9d2-8a989d2c40e8-service-ca-bundle\") pod \"authentication-operator-69f744f599-7qkqs\" (UID: \"4730daad-9b5f-4e27-a9d2-8a989d2c40e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7qkqs" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.195949 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.195969 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4-serving-cert\") pod \"etcd-operator-b45778765-ttsv6\" (UID: \"82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ttsv6" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.195986 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b8d098a-b26f-4de7-94a4-5ad6e2cd60d7-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-49pm9\" (UID: \"7b8d098a-b26f-4de7-94a4-5ad6e2cd60d7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-49pm9" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196002 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2-trusted-ca\") pod \"console-operator-58897d9998-b5zvf\" (UID: \"5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2\") " pod="openshift-console-operator/console-operator-58897d9998-b5zvf" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196021 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8868\" (UniqueName: \"kubernetes.io/projected/09e605cd-967a-462f-8fad-1cf16ef64351-kube-api-access-p8868\") pod \"controller-manager-879f6c89f-lrgth\" (UID: \"09e605cd-967a-462f-8fad-1cf16ef64351\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196040 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b9deda20-3739-4585-a197-f51ce9a63b8c-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-nlddn\" (UID: \"b9deda20-3739-4585-a197-f51ce9a63b8c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nlddn" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196057 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-audit-dir\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196075 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-trusted-ca-bundle\") pod \"console-f9d7485db-rwshx\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " pod="openshift-console/console-f9d7485db-rwshx" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196091 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4730daad-9b5f-4e27-a9d2-8a989d2c40e8-config\") pod \"authentication-operator-69f744f599-7qkqs\" (UID: \"4730daad-9b5f-4e27-a9d2-8a989d2c40e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7qkqs" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196106 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-audit-policies\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196122 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b8d098a-b26f-4de7-94a4-5ad6e2cd60d7-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-49pm9\" (UID: \"7b8d098a-b26f-4de7-94a4-5ad6e2cd60d7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-49pm9" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196145 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/79899b57-242d-45e0-8527-2af257b8a5b7-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-qc97t\" (UID: 
\"79899b57-242d-45e0-8527-2af257b8a5b7\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qc97t" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196162 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0a4a5bd4-3691-4bce-9266-3d3a05dae585-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-76ppj\" (UID: \"0a4a5bd4-3691-4bce-9266-3d3a05dae585\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-76ppj" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196192 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1713b1df-6fc2-4060-91b9-e631ad9b335f-trusted-ca\") pod \"ingress-operator-5b745b69d9-sl4x8\" (UID: \"1713b1df-6fc2-4060-91b9-e631ad9b335f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl4x8" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196214 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196247 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wd5kf\" (UniqueName: \"kubernetes.io/projected/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-kube-api-access-wd5kf\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196275 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t24jr\" (UniqueName: \"kubernetes.io/projected/82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4-kube-api-access-t24jr\") pod \"etcd-operator-b45778765-ttsv6\" (UID: \"82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ttsv6" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196297 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d98f0d35-15ef-4d83-9c0b-104f44f4ae41-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-fzwbz\" (UID: \"d98f0d35-15ef-4d83-9c0b-104f44f4ae41\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fzwbz" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196320 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9deda20-3739-4585-a197-f51ce9a63b8c-config\") pod \"kube-controller-manager-operator-78b949d7b-nlddn\" (UID: \"b9deda20-3739-4585-a197-f51ce9a63b8c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nlddn" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196339 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d98f0d35-15ef-4d83-9c0b-104f44f4ae41-config\") pod \"kube-apiserver-operator-766d6c64bb-fzwbz\" (UID: \"d98f0d35-15ef-4d83-9c0b-104f44f4ae41\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fzwbz" Feb 
02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196360 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-oauth-serving-cert\") pod \"console-f9d7485db-rwshx\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " pod="openshift-console/console-f9d7485db-rwshx" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196381 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fl24t\" (UniqueName: \"kubernetes.io/projected/1713b1df-6fc2-4060-91b9-e631ad9b335f-kube-api-access-fl24t\") pod \"ingress-operator-5b745b69d9-sl4x8\" (UID: \"1713b1df-6fc2-4060-91b9-e631ad9b335f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl4x8" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196402 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196420 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196440 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcvcr\" (UniqueName: \"kubernetes.io/projected/79899b57-242d-45e0-8527-2af257b8a5b7-kube-api-access-wcvcr\") pod \"cluster-samples-operator-665b6dd947-qc97t\" (UID: \"79899b57-242d-45e0-8527-2af257b8a5b7\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qc97t" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196462 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1713b1df-6fc2-4060-91b9-e631ad9b335f-metrics-tls\") pod \"ingress-operator-5b745b69d9-sl4x8\" (UID: \"1713b1df-6fc2-4060-91b9-e631ad9b335f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl4x8" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196483 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-console-config\") pod \"console-f9d7485db-rwshx\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " pod="openshift-console/console-f9d7485db-rwshx" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196505 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196527 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0a4a5bd4-3691-4bce-9266-3d3a05dae585-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-76ppj\" (UID: \"0a4a5bd4-3691-4bce-9266-3d3a05dae585\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-76ppj" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196545 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f45sx\" (UniqueName: \"kubernetes.io/projected/a6f21874-ee8f-4718-b2ab-8b4a97543364-kube-api-access-f45sx\") pod \"downloads-7954f5f757-n987n\" (UID: \"a6f21874-ee8f-4718-b2ab-8b4a97543364\") " pod="openshift-console/downloads-7954f5f757-n987n" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196561 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8mx8\" (UniqueName: \"kubernetes.io/projected/c61f342b-cd14-408f-8c6e-e65cee1ebb39-kube-api-access-l8mx8\") pod \"machine-api-operator-5694c8668f-s6v4j\" (UID: \"c61f342b-cd14-408f-8c6e-e65cee1ebb39\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s6v4j" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196577 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0a4a5bd4-3691-4bce-9266-3d3a05dae585-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-76ppj\" (UID: \"0a4a5bd4-3691-4bce-9266-3d3a05dae585\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-76ppj" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196608 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4-etcd-client\") pod \"etcd-operator-b45778765-ttsv6\" (UID: \"82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ttsv6" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196623 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57k6c\" (UniqueName: \"kubernetes.io/projected/938066f1-bde4-4bbc-ad80-47379f6a66ff-kube-api-access-57k6c\") pod \"migrator-59844c95c7-z7g7z\" (UID: \"938066f1-bde4-4bbc-ad80-47379f6a66ff\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-z7g7z" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196648 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4-etcd-service-ca\") pod \"etcd-operator-b45778765-ttsv6\" (UID: \"82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ttsv6" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196667 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/21a3e3ee-831b-450a-b0da-13551b7353e4-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-bwhck\" (UID: \"21a3e3ee-831b-450a-b0da-13551b7353e4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bwhck" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196683 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ttfq\" (UniqueName: 
\"kubernetes.io/projected/5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2-kube-api-access-8ttfq\") pod \"console-operator-58897d9998-b5zvf\" (UID: \"5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2\") " pod="openshift-console-operator/console-operator-58897d9998-b5zvf" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196698 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1713b1df-6fc2-4060-91b9-e631ad9b335f-bound-sa-token\") pod \"ingress-operator-5b745b69d9-sl4x8\" (UID: \"1713b1df-6fc2-4060-91b9-e631ad9b335f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl4x8" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196717 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4xfm\" (UniqueName: \"kubernetes.io/projected/0124b915-2ac4-4be7-b356-bf78a8295d9d-kube-api-access-q4xfm\") pod \"console-f9d7485db-rwshx\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " pod="openshift-console/console-f9d7485db-rwshx" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196751 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196768 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196785 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9jvr\" (UniqueName: \"kubernetes.io/projected/21bfe782-2848-4171-8919-e1ce96150a09-kube-api-access-t9jvr\") pod \"openshift-apiserver-operator-796bbdcf4f-dx54k\" (UID: \"21bfe782-2848-4171-8919-e1ce96150a09\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dx54k" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196800 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/09e605cd-967a-462f-8fad-1cf16ef64351-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-lrgth\" (UID: \"09e605cd-967a-462f-8fad-1cf16ef64351\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196815 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/c61f342b-cd14-408f-8c6e-e65cee1ebb39-images\") pod \"machine-api-operator-5694c8668f-s6v4j\" (UID: \"c61f342b-cd14-408f-8c6e-e65cee1ebb39\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s6v4j" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196830 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4-config\") pod \"etcd-operator-b45778765-ttsv6\" (UID: 
\"82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ttsv6" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196846 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09e605cd-967a-462f-8fad-1cf16ef64351-config\") pod \"controller-manager-879f6c89f-lrgth\" (UID: \"09e605cd-967a-462f-8fad-1cf16ef64351\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196862 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-service-ca\") pod \"console-f9d7485db-rwshx\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " pod="openshift-console/console-f9d7485db-rwshx" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196878 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/c61f342b-cd14-408f-8c6e-e65cee1ebb39-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-s6v4j\" (UID: \"c61f342b-cd14-408f-8c6e-e65cee1ebb39\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s6v4j" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196896 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196911 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2-config\") pod \"console-operator-58897d9998-b5zvf\" (UID: \"5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2\") " pod="openshift-console-operator/console-operator-58897d9998-b5zvf" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196926 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/09e605cd-967a-462f-8fad-1cf16ef64351-client-ca\") pod \"controller-manager-879f6c89f-lrgth\" (UID: \"09e605cd-967a-462f-8fad-1cf16ef64351\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196942 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21a3e3ee-831b-450a-b0da-13551b7353e4-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-bwhck\" (UID: \"21a3e3ee-831b-450a-b0da-13551b7353e4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bwhck" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196957 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4730daad-9b5f-4e27-a9d2-8a989d2c40e8-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-7qkqs\" (UID: \"4730daad-9b5f-4e27-a9d2-8a989d2c40e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7qkqs" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196973 4755 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6vdn\" (UniqueName: \"kubernetes.io/projected/0a4a5bd4-3691-4bce-9266-3d3a05dae585-kube-api-access-p6vdn\") pod \"cluster-image-registry-operator-dc59b4c8b-76ppj\" (UID: \"0a4a5bd4-3691-4bce-9266-3d3a05dae585\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-76ppj" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.196987 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0124b915-2ac4-4be7-b356-bf78a8295d9d-console-serving-cert\") pod \"console-f9d7485db-rwshx\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " pod="openshift-console/console-f9d7485db-rwshx" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.198278 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/21bfe782-2848-4171-8919-e1ce96150a09-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-dx54k\" (UID: \"21bfe782-2848-4171-8919-e1ce96150a09\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dx54k" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.198519 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4-etcd-ca\") pod \"etcd-operator-b45778765-ttsv6\" (UID: \"82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ttsv6" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.198596 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c61f342b-cd14-408f-8c6e-e65cee1ebb39-config\") pod \"machine-api-operator-5694c8668f-s6v4j\" (UID: \"c61f342b-cd14-408f-8c6e-e65cee1ebb39\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s6v4j" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.199223 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09e605cd-967a-462f-8fad-1cf16ef64351-serving-cert\") pod \"controller-manager-879f6c89f-lrgth\" (UID: \"09e605cd-967a-462f-8fad-1cf16ef64351\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.199247 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4730daad-9b5f-4e27-a9d2-8a989d2c40e8-serving-cert\") pod \"authentication-operator-69f744f599-7qkqs\" (UID: \"4730daad-9b5f-4e27-a9d2-8a989d2c40e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7qkqs" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.199316 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4730daad-9b5f-4e27-a9d2-8a989d2c40e8-service-ca-bundle\") pod \"authentication-operator-69f744f599-7qkqs\" (UID: \"4730daad-9b5f-4e27-a9d2-8a989d2c40e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7qkqs" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.199317 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0124b915-2ac4-4be7-b356-bf78a8295d9d-console-oauth-config\") pod \"console-f9d7485db-rwshx\" (UID: 
\"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " pod="openshift-console/console-f9d7485db-rwshx" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.199802 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4730daad-9b5f-4e27-a9d2-8a989d2c40e8-config\") pod \"authentication-operator-69f744f599-7qkqs\" (UID: \"4730daad-9b5f-4e27-a9d2-8a989d2c40e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7qkqs" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.200249 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-trusted-ca-bundle\") pod \"console-f9d7485db-rwshx\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " pod="openshift-console/console-f9d7485db-rwshx" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.200509 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.200624 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/0a4a5bd4-3691-4bce-9266-3d3a05dae585-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-76ppj\" (UID: \"0a4a5bd4-3691-4bce-9266-3d3a05dae585\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-76ppj" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.200646 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0a4a5bd4-3691-4bce-9266-3d3a05dae585-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-76ppj\" (UID: \"0a4a5bd4-3691-4bce-9266-3d3a05dae585\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-76ppj" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.201381 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2-serving-cert\") pod \"console-operator-58897d9998-b5zvf\" (UID: \"5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2\") " pod="openshift-console-operator/console-operator-58897d9998-b5zvf" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.201437 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1812beb5-4070-4649-8b5b-e78e5257c3dc-metrics-tls\") pod \"dns-operator-744455d44c-s9mjc\" (UID: \"1812beb5-4070-4649-8b5b-e78e5257c3dc\") " pod="openshift-dns-operator/dns-operator-744455d44c-s9mjc" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.201540 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1713b1df-6fc2-4060-91b9-e631ad9b335f-trusted-ca\") pod \"ingress-operator-5b745b69d9-sl4x8\" (UID: \"1713b1df-6fc2-4060-91b9-e631ad9b335f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl4x8" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.202215 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-oauth-serving-cert\") pod \"console-f9d7485db-rwshx\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " 
pod="openshift-console/console-f9d7485db-rwshx" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.202342 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9deda20-3739-4585-a197-f51ce9a63b8c-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-nlddn\" (UID: \"b9deda20-3739-4585-a197-f51ce9a63b8c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nlddn" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.202439 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2-trusted-ca\") pod \"console-operator-58897d9998-b5zvf\" (UID: \"5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2\") " pod="openshift-console-operator/console-operator-58897d9998-b5zvf" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.202834 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-audit-dir\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.202956 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9deda20-3739-4585-a197-f51ce9a63b8c-config\") pod \"kube-controller-manager-operator-78b949d7b-nlddn\" (UID: \"b9deda20-3739-4585-a197-f51ce9a63b8c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nlddn" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.203125 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4-etcd-service-ca\") pod \"etcd-operator-b45778765-ttsv6\" (UID: \"82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ttsv6" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.203161 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-console-config\") pod \"console-f9d7485db-rwshx\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " pod="openshift-console/console-f9d7485db-rwshx" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.203251 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2-config\") pod \"console-operator-58897d9998-b5zvf\" (UID: \"5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2\") " pod="openshift-console-operator/console-operator-58897d9998-b5zvf" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.203342 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21a3e3ee-831b-450a-b0da-13551b7353e4-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-bwhck\" (UID: \"21a3e3ee-831b-450a-b0da-13551b7353e4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bwhck" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.203861 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/4730daad-9b5f-4e27-a9d2-8a989d2c40e8-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-7qkqs\" (UID: \"4730daad-9b5f-4e27-a9d2-8a989d2c40e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7qkqs" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.203886 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/09e605cd-967a-462f-8fad-1cf16ef64351-client-ca\") pod \"controller-manager-879f6c89f-lrgth\" (UID: \"09e605cd-967a-462f-8fad-1cf16ef64351\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.203988 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b8d098a-b26f-4de7-94a4-5ad6e2cd60d7-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-49pm9\" (UID: \"7b8d098a-b26f-4de7-94a4-5ad6e2cd60d7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-49pm9" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.204446 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-service-ca\") pod \"console-f9d7485db-rwshx\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " pod="openshift-console/console-f9d7485db-rwshx" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.204533 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4-serving-cert\") pod \"etcd-operator-b45778765-ttsv6\" (UID: \"82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ttsv6" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.204747 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4-etcd-client\") pod \"etcd-operator-b45778765-ttsv6\" (UID: \"82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ttsv6" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.204911 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/c61f342b-cd14-408f-8c6e-e65cee1ebb39-images\") pod \"machine-api-operator-5694c8668f-s6v4j\" (UID: \"c61f342b-cd14-408f-8c6e-e65cee1ebb39\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s6v4j" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.204990 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b8d098a-b26f-4de7-94a4-5ad6e2cd60d7-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-49pm9\" (UID: \"7b8d098a-b26f-4de7-94a4-5ad6e2cd60d7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-49pm9" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.205054 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0124b915-2ac4-4be7-b356-bf78a8295d9d-console-serving-cert\") pod \"console-f9d7485db-rwshx\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " pod="openshift-console/console-f9d7485db-rwshx" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.205169 4755 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4-config\") pod \"etcd-operator-b45778765-ttsv6\" (UID: \"82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ttsv6" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.206124 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/79899b57-242d-45e0-8527-2af257b8a5b7-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-qc97t\" (UID: \"79899b57-242d-45e0-8527-2af257b8a5b7\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qc97t" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.206446 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09e605cd-967a-462f-8fad-1cf16ef64351-config\") pod \"controller-manager-879f6c89f-lrgth\" (UID: \"09e605cd-967a-462f-8fad-1cf16ef64351\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.206618 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1713b1df-6fc2-4060-91b9-e631ad9b335f-metrics-tls\") pod \"ingress-operator-5b745b69d9-sl4x8\" (UID: \"1713b1df-6fc2-4060-91b9-e631ad9b335f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl4x8" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.206878 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/09e605cd-967a-462f-8fad-1cf16ef64351-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-lrgth\" (UID: \"09e605cd-967a-462f-8fad-1cf16ef64351\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.207607 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/21a3e3ee-831b-450a-b0da-13551b7353e4-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-bwhck\" (UID: \"21a3e3ee-831b-450a-b0da-13551b7353e4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bwhck" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.208742 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/c61f342b-cd14-408f-8c6e-e65cee1ebb39-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-s6v4j\" (UID: \"c61f342b-cd14-408f-8c6e-e65cee1ebb39\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s6v4j" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.212277 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.213766 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.233575 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.253279 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.264302 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d98f0d35-15ef-4d83-9c0b-104f44f4ae41-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-fzwbz\" (UID: \"d98f0d35-15ef-4d83-9c0b-104f44f4ae41\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fzwbz" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.273395 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.283487 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d98f0d35-15ef-4d83-9c0b-104f44f4ae41-config\") pod \"kube-apiserver-operator-766d6c64bb-fzwbz\" (UID: \"d98f0d35-15ef-4d83-9c0b-104f44f4ae41\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fzwbz" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.313298 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.321348 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.333694 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.353466 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.365364 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.374061 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.393536 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Feb 02 
22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.398189 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.413485 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.427489 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.433223 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.443819 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-audit-policies\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: W0202 22:36:34.450053 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-1bf963528a2f9b7a218fefaff76c471601f9d6e66751624693246d76a0b5a18c WatchSource:0}: Error finding container 1bf963528a2f9b7a218fefaff76c471601f9d6e66751624693246d76a0b5a18c: Status 404 returned error can't find the container with id 1bf963528a2f9b7a218fefaff76c471601f9d6e66751624693246d76a0b5a18c Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.452929 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.456624 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.473723 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.480712 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.498249 4755 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.507259 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.513187 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.533471 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.548496 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.553997 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.561952 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.593680 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.599410 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.599860 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.607324 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:34 crc kubenswrapper[4755]: W0202 22:36:34.619568 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-6df6481df65f432d82af9cf81b45a06b24204aaa0f77edf1130d38e5ad49d050 WatchSource:0}: 
Error finding container 6df6481df65f432d82af9cf81b45a06b24204aaa0f77edf1130d38e5ad49d050: Status 404 returned error can't find the container with id 6df6481df65f432d82af9cf81b45a06b24204aaa0f77edf1130d38e5ad49d050 Feb 02 22:36:34 crc kubenswrapper[4755]: W0202 22:36:34.626159 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-309c436926506e0b6be53f401c40f138355ab7b82e9deab7733db688fdaa5be8 WatchSource:0}: Error finding container 309c436926506e0b6be53f401c40f138355ab7b82e9deab7733db688fdaa5be8: Status 404 returned error can't find the container with id 309c436926506e0b6be53f401c40f138355ab7b82e9deab7733db688fdaa5be8 Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.633364 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.653317 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.674412 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.693962 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.714191 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.734128 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.754211 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.773932 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.794367 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.814658 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.834300 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.854705 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.874634 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.893911 4755 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.914270 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.933860 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.934947 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"2f9639123af176c8c42c3fc92fe9ff5eb7eeaced7a120563b4c09028a2d5efd2"} Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.935027 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"309c436926506e0b6be53f401c40f138355ab7b82e9deab7733db688fdaa5be8"} Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.937298 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"dea02649614278ed0de669d46cb23c9ae1b685b98df34dc279c92f9603cce818"} Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.937360 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"1bf963528a2f9b7a218fefaff76c471601f9d6e66751624693246d76a0b5a18c"} Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.939231 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"249a2dfc1b02e648eba0366aded3f53af0126e685d8ee2823c14ba404bf779bd"} Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.939278 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"6df6481df65f432d82af9cf81b45a06b24204aaa0f77edf1130d38e5ad49d050"} Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.939507 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.953547 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.974365 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Feb 02 22:36:34 crc kubenswrapper[4755]: I0202 22:36:34.994625 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.014778 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.034176 4755 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.054156 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.075139 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.092963 4755 request.go:700] Waited for 1.010910124s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress/secrets?fieldSelector=metadata.name%3Drouter-certs-default&limit=500&resourceVersion=0 Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.094957 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.114062 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.135601 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.154226 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.173594 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.193857 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.223685 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.234276 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.254773 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.274168 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.294587 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.313360 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.333907 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.355173 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.373981 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Feb 02 22:36:35 crc 
kubenswrapper[4755]: I0202 22:36:35.393716 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.414175 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.435563 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.454490 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.474981 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.495528 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.514602 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.534405 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.564578 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.574007 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.595047 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.614467 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.635330 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.654644 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.675010 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.694991 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.714163 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.734423 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.755014 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Feb 02 
22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.809083 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nw8r\" (UniqueName: \"kubernetes.io/projected/19316443-6a89-4993-b196-1de2bece6e84-kube-api-access-4nw8r\") pod \"apiserver-7bbb656c7d-7zl97\" (UID: \"19316443-6a89-4993-b196-1de2bece6e84\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.826668 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jsntf\" (UniqueName: \"kubernetes.io/projected/49503265-4d0e-484f-8832-38be62c19af0-kube-api-access-jsntf\") pod \"machine-approver-56656f9798-fh5rw\" (UID: \"49503265-4d0e-484f-8832-38be62c19af0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fh5rw" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.847993 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgxk9\" (UniqueName: \"kubernetes.io/projected/1c3ea74e-377e-4d51-b82a-33e8ea14cf59-kube-api-access-fgxk9\") pod \"apiserver-76f77b778f-vcjbc\" (UID: \"1c3ea74e-377e-4d51-b82a-33e8ea14cf59\") " pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.854661 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.862900 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-287x9\" (UniqueName: \"kubernetes.io/projected/fbc07c99-ae2c-459e-9731-ca524c8bfa08-kube-api-access-287x9\") pod \"route-controller-manager-6576b87f9c-m8cdx\" (UID: \"fbc07c99-ae2c-459e-9731-ca524c8bfa08\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.873967 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.894234 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.914789 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.934972 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.953440 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.975192 4755 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Feb 02 22:36:35 crc kubenswrapper[4755]: I0202 22:36:35.993865 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.014020 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.028076 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.056826 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76hsw\" (UniqueName: \"kubernetes.io/projected/21a3e3ee-831b-450a-b0da-13551b7353e4-kube-api-access-76hsw\") pod \"openshift-controller-manager-operator-756b6f6bc6-bwhck\" (UID: \"21a3e3ee-831b-450a-b0da-13551b7353e4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bwhck" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.064574 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fh5rw" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.073615 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d98f0d35-15ef-4d83-9c0b-104f44f4ae41-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-fzwbz\" (UID: \"d98f0d35-15ef-4d83-9c0b-104f44f4ae41\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fzwbz" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.073676 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.091767 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.111676 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7b8d098a-b26f-4de7-94a4-5ad6e2cd60d7-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-49pm9\" (UID: \"7b8d098a-b26f-4de7-94a4-5ad6e2cd60d7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-49pm9" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.111922 4755 request.go:700] Waited for 1.915779845s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-dns-operator/serviceaccounts/dns-operator/token Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.121536 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rj4hs\" (UniqueName: \"kubernetes.io/projected/4730daad-9b5f-4e27-a9d2-8a989d2c40e8-kube-api-access-rj4hs\") pod \"authentication-operator-69f744f599-7qkqs\" (UID: \"4730daad-9b5f-4e27-a9d2-8a989d2c40e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7qkqs" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.155470 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-7qkqs" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.165564 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0a4a5bd4-3691-4bce-9266-3d3a05dae585-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-76ppj\" (UID: \"0a4a5bd4-3691-4bce-9266-3d3a05dae585\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-76ppj" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.169972 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7q8nk\" (UniqueName: \"kubernetes.io/projected/1812beb5-4070-4649-8b5b-e78e5257c3dc-kube-api-access-7q8nk\") pod \"dns-operator-744455d44c-s9mjc\" (UID: \"1812beb5-4070-4649-8b5b-e78e5257c3dc\") " pod="openshift-dns-operator/dns-operator-744455d44c-s9mjc" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.177050 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f45sx\" (UniqueName: \"kubernetes.io/projected/a6f21874-ee8f-4718-b2ab-8b4a97543364-kube-api-access-f45sx\") pod \"downloads-7954f5f757-n987n\" (UID: \"a6f21874-ee8f-4718-b2ab-8b4a97543364\") " pod="openshift-console/downloads-7954f5f757-n987n" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.192569 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8mx8\" (UniqueName: \"kubernetes.io/projected/c61f342b-cd14-408f-8c6e-e65cee1ebb39-kube-api-access-l8mx8\") pod \"machine-api-operator-5694c8668f-s6v4j\" (UID: \"c61f342b-cd14-408f-8c6e-e65cee1ebb39\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s6v4j" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.196864 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-n987n" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.215374 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ttfq\" (UniqueName: \"kubernetes.io/projected/5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2-kube-api-access-8ttfq\") pod \"console-operator-58897d9998-b5zvf\" (UID: \"5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2\") " pod="openshift-console-operator/console-operator-58897d9998-b5zvf" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.245290 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1713b1df-6fc2-4060-91b9-e631ad9b335f-bound-sa-token\") pod \"ingress-operator-5b745b69d9-sl4x8\" (UID: \"1713b1df-6fc2-4060-91b9-e631ad9b335f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl4x8" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.251223 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4xfm\" (UniqueName: \"kubernetes.io/projected/0124b915-2ac4-4be7-b356-bf78a8295d9d-kube-api-access-q4xfm\") pod \"console-f9d7485db-rwshx\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " pod="openshift-console/console-f9d7485db-rwshx" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.268379 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wd5kf\" (UniqueName: \"kubernetes.io/projected/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-kube-api-access-wd5kf\") pod \"oauth-openshift-558db77b4-5nv2v\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.284764 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-vcjbc"] Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.294208 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bwhck" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.299067 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-s9mjc" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.299291 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t24jr\" (UniqueName: \"kubernetes.io/projected/82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4-kube-api-access-t24jr\") pod \"etcd-operator-b45778765-ttsv6\" (UID: \"82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-ttsv6" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.321616 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8868\" (UniqueName: \"kubernetes.io/projected/09e605cd-967a-462f-8fad-1cf16ef64351-kube-api-access-p8868\") pod \"controller-manager-879f6c89f-lrgth\" (UID: \"09e605cd-967a-462f-8fad-1cf16ef64351\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.333249 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fl24t\" (UniqueName: \"kubernetes.io/projected/1713b1df-6fc2-4060-91b9-e631ad9b335f-kube-api-access-fl24t\") pod \"ingress-operator-5b745b69d9-sl4x8\" (UID: \"1713b1df-6fc2-4060-91b9-e631ad9b335f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl4x8" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.340076 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-rwshx" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.349282 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57k6c\" (UniqueName: \"kubernetes.io/projected/938066f1-bde4-4bbc-ad80-47379f6a66ff-kube-api-access-57k6c\") pod \"migrator-59844c95c7-z7g7z\" (UID: \"938066f1-bde4-4bbc-ad80-47379f6a66ff\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-z7g7z" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.349499 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-49pm9" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.361486 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fzwbz" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.365371 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.370483 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcvcr\" (UniqueName: \"kubernetes.io/projected/79899b57-242d-45e0-8527-2af257b8a5b7-kube-api-access-wcvcr\") pod \"cluster-samples-operator-665b6dd947-qc97t\" (UID: \"79899b57-242d-45e0-8527-2af257b8a5b7\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qc97t" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.375198 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-z7g7z" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.390118 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b9deda20-3739-4585-a197-f51ce9a63b8c-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-nlddn\" (UID: \"b9deda20-3739-4585-a197-f51ce9a63b8c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nlddn" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.412033 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6vdn\" (UniqueName: \"kubernetes.io/projected/0a4a5bd4-3691-4bce-9266-3d3a05dae585-kube-api-access-p6vdn\") pod \"cluster-image-registry-operator-dc59b4c8b-76ppj\" (UID: \"0a4a5bd4-3691-4bce-9266-3d3a05dae585\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-76ppj" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.430049 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9jvr\" (UniqueName: \"kubernetes.io/projected/21bfe782-2848-4171-8919-e1ce96150a09-kube-api-access-t9jvr\") pod \"openshift-apiserver-operator-796bbdcf4f-dx54k\" (UID: \"21bfe782-2848-4171-8919-e1ce96150a09\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dx54k" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.465982 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-s6v4j" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.478805 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.484232 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-b5zvf" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.529004 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.529043 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzbl2\" (UniqueName: \"kubernetes.io/projected/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-kube-api-access-qzbl2\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.529066 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-trusted-ca\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.529090 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-registry-certificates\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.529108 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-installation-pull-secrets\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.529126 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-bound-sa-token\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.529143 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-ca-trust-extracted\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.529177 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-registry-tls\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:36 crc kubenswrapper[4755]: E0202 22:36:36.529480 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:37.029468444 +0000 UTC m=+152.720688770 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.541627 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-ttsv6" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.544568 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-76ppj" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.563422 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dx54k" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.563487 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx"] Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.568065 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qc97t" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.585149 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nlddn" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.630540 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:36 crc kubenswrapper[4755]: E0202 22:36:36.630882 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:37.130852273 +0000 UTC m=+152.822072609 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.631083 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/209646e7-0d11-4f45-9222-ca6ac4d92268-registration-dir\") pod \"csi-hostpathplugin-gn7kw\" (UID: \"209646e7-0d11-4f45-9222-ca6ac4d92268\") " pod="hostpath-provisioner/csi-hostpathplugin-gn7kw" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.631165 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ac249dca-286c-4d91-8782-2d3b0676dc68-proxy-tls\") pod \"machine-config-controller-84d6567774-4n2db\" (UID: \"ac249dca-286c-4d91-8782-2d3b0676dc68\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4n2db" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.631264 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-bound-sa-token\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.631402 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdlsw\" (UniqueName: \"kubernetes.io/projected/099069af-444c-4cc1-8c7f-786a4a64aedb-kube-api-access-qdlsw\") pod \"machine-config-operator-74547568cd-69tbt\" (UID: \"099069af-444c-4cc1-8c7f-786a4a64aedb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69tbt" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.631487 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/209646e7-0d11-4f45-9222-ca6ac4d92268-plugins-dir\") pod \"csi-hostpathplugin-gn7kw\" (UID: \"209646e7-0d11-4f45-9222-ca6ac4d92268\") " pod="hostpath-provisioner/csi-hostpathplugin-gn7kw" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.631557 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ca9ec072-2c92-4631-8c7e-1d5b59f85076-webhook-cert\") pod \"packageserver-d55dfcdfc-sjx8j\" (UID: \"ca9ec072-2c92-4631-8c7e-1d5b59f85076\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.631672 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b45fccf-2dc0-4fab-8a28-76127a690f13-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-zw2ht\" (UID: \"1b45fccf-2dc0-4fab-8a28-76127a690f13\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw2ht" Feb 02 22:36:36 crc 
kubenswrapper[4755]: I0202 22:36:36.631780 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/209646e7-0d11-4f45-9222-ca6ac4d92268-csi-data-dir\") pod \"csi-hostpathplugin-gn7kw\" (UID: \"209646e7-0d11-4f45-9222-ca6ac4d92268\") " pod="hostpath-provisioner/csi-hostpathplugin-gn7kw" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.631884 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgrl6\" (UniqueName: \"kubernetes.io/projected/8f5a7aaf-90b0-4895-9d69-0b5df2ae76d1-kube-api-access-rgrl6\") pod \"service-ca-9c57cc56f-dgrbx\" (UID: \"8f5a7aaf-90b0-4895-9d69-0b5df2ae76d1\") " pod="openshift-service-ca/service-ca-9c57cc56f-dgrbx" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.632481 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl4x8" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.632581 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/fa9699fd-2f6b-460d-9ca7-95ebba8a5d72-profile-collector-cert\") pod \"catalog-operator-68c6474976-gv5dm\" (UID: \"fa9699fd-2f6b-460d-9ca7-95ebba8a5d72\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gv5dm" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.632692 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/8f5a7aaf-90b0-4895-9d69-0b5df2ae76d1-signing-key\") pod \"service-ca-9c57cc56f-dgrbx\" (UID: \"8f5a7aaf-90b0-4895-9d69-0b5df2ae76d1\") " pod="openshift-service-ca/service-ca-9c57cc56f-dgrbx" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.632716 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/439a2e89-139f-4356-ac5f-6325ac0c2a92-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-lkd4r\" (UID: \"439a2e89-139f-4356-ac5f-6325ac0c2a92\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-lkd4r" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.632841 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/d61538e7-e186-4a77-a96a-9d4d9d514837-default-certificate\") pod \"router-default-5444994796-2x78p\" (UID: \"d61538e7-e186-4a77-a96a-9d4d9d514837\") " pod="openshift-ingress/router-default-5444994796-2x78p" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.633257 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/f6b1859b-cd07-469f-be86-e691a81d5b85-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-h2gd9\" (UID: \"f6b1859b-cd07-469f-be86-e691a81d5b85\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h2gd9" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.633568 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/099069af-444c-4cc1-8c7f-786a4a64aedb-proxy-tls\") pod 
\"machine-config-operator-74547568cd-69tbt\" (UID: \"099069af-444c-4cc1-8c7f-786a4a64aedb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69tbt" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.633685 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbvtz\" (UniqueName: \"kubernetes.io/projected/f6b1859b-cd07-469f-be86-e691a81d5b85-kube-api-access-jbvtz\") pod \"control-plane-machine-set-operator-78cbb6b69f-h2gd9\" (UID: \"f6b1859b-cd07-469f-be86-e691a81d5b85\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h2gd9" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.633714 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/47a9f45a-7c7f-41fb-b112-6e43666abe9b-metrics-tls\") pod \"dns-default-4dnjt\" (UID: \"47a9f45a-7c7f-41fb-b112-6e43666abe9b\") " pod="openshift-dns/dns-default-4dnjt" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.633743 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/411c26b8-a425-41a4-b05c-3bdbd49f6e69-certs\") pod \"machine-config-server-6t4wf\" (UID: \"411c26b8-a425-41a4-b05c-3bdbd49f6e69\") " pod="openshift-machine-config-operator/machine-config-server-6t4wf" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.633778 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/322e7205-53dc-4148-b16b-5989623d4cb6-config\") pod \"service-ca-operator-777779d784-lwzgj\" (UID: \"322e7205-53dc-4148-b16b-5989623d4cb6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lwzgj" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.633793 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/411c26b8-a425-41a4-b05c-3bdbd49f6e69-node-bootstrap-token\") pod \"machine-config-server-6t4wf\" (UID: \"411c26b8-a425-41a4-b05c-3bdbd49f6e69\") " pod="openshift-machine-config-operator/machine-config-server-6t4wf" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.633807 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/099069af-444c-4cc1-8c7f-786a4a64aedb-auth-proxy-config\") pod \"machine-config-operator-74547568cd-69tbt\" (UID: \"099069af-444c-4cc1-8c7f-786a4a64aedb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69tbt" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.633821 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b45fccf-2dc0-4fab-8a28-76127a690f13-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-zw2ht\" (UID: \"1b45fccf-2dc0-4fab-8a28-76127a690f13\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw2ht" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.633836 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59fvt\" (UniqueName: \"kubernetes.io/projected/1b45fccf-2dc0-4fab-8a28-76127a690f13-kube-api-access-59fvt\") pod 
\"kube-storage-version-migrator-operator-b67b599dd-zw2ht\" (UID: \"1b45fccf-2dc0-4fab-8a28-76127a690f13\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw2ht" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.633968 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkbh4\" (UniqueName: \"kubernetes.io/projected/439a2e89-139f-4356-ac5f-6325ac0c2a92-kube-api-access-fkbh4\") pod \"multus-admission-controller-857f4d67dd-lkd4r\" (UID: \"439a2e89-139f-4356-ac5f-6325ac0c2a92\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-lkd4r" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.634017 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ac249dca-286c-4d91-8782-2d3b0676dc68-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-4n2db\" (UID: \"ac249dca-286c-4d91-8782-2d3b0676dc68\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4n2db" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.634066 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jt8x\" (UniqueName: \"kubernetes.io/projected/47a9f45a-7c7f-41fb-b112-6e43666abe9b-kube-api-access-8jt8x\") pod \"dns-default-4dnjt\" (UID: \"47a9f45a-7c7f-41fb-b112-6e43666abe9b\") " pod="openshift-dns/dns-default-4dnjt" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.634094 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbkkr\" (UniqueName: \"kubernetes.io/projected/56ca6971-8e8a-485d-886d-f5c8eadef3de-kube-api-access-cbkkr\") pod \"openshift-config-operator-7777fb866f-5hjgs\" (UID: \"56ca6971-8e8a-485d-886d-f5c8eadef3de\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5hjgs" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.634112 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/ca9ec072-2c92-4631-8c7e-1d5b59f85076-tmpfs\") pod \"packageserver-d55dfcdfc-sjx8j\" (UID: \"ca9ec072-2c92-4631-8c7e-1d5b59f85076\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.634155 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.634173 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6r9j\" (UniqueName: \"kubernetes.io/projected/2d480358-faea-430f-97ad-c49f7878007b-kube-api-access-n6r9j\") pod \"collect-profiles-29501190-785d5\" (UID: \"2d480358-faea-430f-97ad-c49f7878007b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501190-785d5" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.634189 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: 
\"kubernetes.io/host-path/209646e7-0d11-4f45-9222-ca6ac4d92268-socket-dir\") pod \"csi-hostpathplugin-gn7kw\" (UID: \"209646e7-0d11-4f45-9222-ca6ac4d92268\") " pod="hostpath-provisioner/csi-hostpathplugin-gn7kw" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.634253 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/d61538e7-e186-4a77-a96a-9d4d9d514837-stats-auth\") pod \"router-default-5444994796-2x78p\" (UID: \"d61538e7-e186-4a77-a96a-9d4d9d514837\") " pod="openshift-ingress/router-default-5444994796-2x78p" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.634360 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/60190c83-51d0-42d2-985b-6f790587622e-srv-cert\") pod \"olm-operator-6b444d44fb-hg7t4\" (UID: \"60190c83-51d0-42d2-985b-6f790587622e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hg7t4" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.634391 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsdgw\" (UniqueName: \"kubernetes.io/projected/fa9699fd-2f6b-460d-9ca7-95ebba8a5d72-kube-api-access-zsdgw\") pod \"catalog-operator-68c6474976-gv5dm\" (UID: \"fa9699fd-2f6b-460d-9ca7-95ebba8a5d72\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gv5dm" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.634411 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/8f5a7aaf-90b0-4895-9d69-0b5df2ae76d1-signing-cabundle\") pod \"service-ca-9c57cc56f-dgrbx\" (UID: \"8f5a7aaf-90b0-4895-9d69-0b5df2ae76d1\") " pod="openshift-service-ca/service-ca-9c57cc56f-dgrbx" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.634446 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/099069af-444c-4cc1-8c7f-786a4a64aedb-images\") pod \"machine-config-operator-74547568cd-69tbt\" (UID: \"099069af-444c-4cc1-8c7f-786a4a64aedb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69tbt" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.634462 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-455qd\" (UniqueName: \"kubernetes.io/projected/d61538e7-e186-4a77-a96a-9d4d9d514837-kube-api-access-455qd\") pod \"router-default-5444994796-2x78p\" (UID: \"d61538e7-e186-4a77-a96a-9d4d9d514837\") " pod="openshift-ingress/router-default-5444994796-2x78p" Feb 02 22:36:36 crc kubenswrapper[4755]: E0202 22:36:36.634872 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:37.134861607 +0000 UTC m=+152.826081933 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.635393 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-registry-certificates\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.635431 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7n7fz\" (UniqueName: \"kubernetes.io/projected/60190c83-51d0-42d2-985b-6f790587622e-kube-api-access-7n7fz\") pod \"olm-operator-6b444d44fb-hg7t4\" (UID: \"60190c83-51d0-42d2-985b-6f790587622e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hg7t4" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.635449 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/0aec75ad-50ac-47c3-ad95-efd5b76ed561-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-kc7j8\" (UID: \"0aec75ad-50ac-47c3-ad95-efd5b76ed561\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kc7j8" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.635628 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-installation-pull-secrets\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.635651 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/47a9f45a-7c7f-41fb-b112-6e43666abe9b-config-volume\") pod \"dns-default-4dnjt\" (UID: \"47a9f45a-7c7f-41fb-b112-6e43666abe9b\") " pod="openshift-dns/dns-default-4dnjt" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.635725 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/322e7205-53dc-4148-b16b-5989623d4cb6-serving-cert\") pod \"service-ca-operator-777779d784-lwzgj\" (UID: \"322e7205-53dc-4148-b16b-5989623d4cb6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lwzgj" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.635758 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dl6rv\" (UniqueName: \"kubernetes.io/projected/467ef27d-8f51-4317-80ee-9071d7024f86-kube-api-access-dl6rv\") pod \"marketplace-operator-79b997595-hnz5f\" (UID: \"467ef27d-8f51-4317-80ee-9071d7024f86\") " pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f" Feb 02 22:36:36 crc 
kubenswrapper[4755]: I0202 22:36:36.635842 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/56ca6971-8e8a-485d-886d-f5c8eadef3de-available-featuregates\") pod \"openshift-config-operator-7777fb866f-5hjgs\" (UID: \"56ca6971-8e8a-485d-886d-f5c8eadef3de\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5hjgs" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.635885 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-ca-trust-extracted\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.635922 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcrk8\" (UniqueName: \"kubernetes.io/projected/209646e7-0d11-4f45-9222-ca6ac4d92268-kube-api-access-dcrk8\") pod \"csi-hostpathplugin-gn7kw\" (UID: \"209646e7-0d11-4f45-9222-ca6ac4d92268\") " pod="hostpath-provisioner/csi-hostpathplugin-gn7kw" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.635939 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9q42c\" (UniqueName: \"kubernetes.io/projected/0aec75ad-50ac-47c3-ad95-efd5b76ed561-kube-api-access-9q42c\") pod \"package-server-manager-789f6589d5-kc7j8\" (UID: \"0aec75ad-50ac-47c3-ad95-efd5b76ed561\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kc7j8" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.636546 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-registry-certificates\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.637164 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-ca-trust-extracted\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.637213 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6gsz\" (UniqueName: \"kubernetes.io/projected/ca9ec072-2c92-4631-8c7e-1d5b59f85076-kube-api-access-m6gsz\") pod \"packageserver-d55dfcdfc-sjx8j\" (UID: \"ca9ec072-2c92-4631-8c7e-1d5b59f85076\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.637236 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/60190c83-51d0-42d2-985b-6f790587622e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hg7t4\" (UID: \"60190c83-51d0-42d2-985b-6f790587622e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hg7t4" Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.637444 
4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/96042d4b-00db-43c7-9f94-203fa722a690-cert\") pod \"ingress-canary-vdz28\" (UID: \"96042d4b-00db-43c7-9f94-203fa722a690\") " pod="openshift-ingress-canary/ingress-canary-vdz28"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.637614 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2d480358-faea-430f-97ad-c49f7878007b-config-volume\") pod \"collect-profiles-29501190-785d5\" (UID: \"2d480358-faea-430f-97ad-c49f7878007b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501190-785d5"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.637640 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4p9r\" (UniqueName: \"kubernetes.io/projected/ac249dca-286c-4d91-8782-2d3b0676dc68-kube-api-access-z4p9r\") pod \"machine-config-controller-84d6567774-4n2db\" (UID: \"ac249dca-286c-4d91-8782-2d3b0676dc68\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4n2db"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.637668 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/56ca6971-8e8a-485d-886d-f5c8eadef3de-serving-cert\") pod \"openshift-config-operator-7777fb866f-5hjgs\" (UID: \"56ca6971-8e8a-485d-886d-f5c8eadef3de\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5hjgs"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.638155 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d61538e7-e186-4a77-a96a-9d4d9d514837-metrics-certs\") pod \"router-default-5444994796-2x78p\" (UID: \"d61538e7-e186-4a77-a96a-9d4d9d514837\") " pod="openshift-ingress/router-default-5444994796-2x78p"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.638184 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ca9ec072-2c92-4631-8c7e-1d5b59f85076-apiservice-cert\") pod \"packageserver-d55dfcdfc-sjx8j\" (UID: \"ca9ec072-2c92-4631-8c7e-1d5b59f85076\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.638216 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2d480358-faea-430f-97ad-c49f7878007b-secret-volume\") pod \"collect-profiles-29501190-785d5\" (UID: \"2d480358-faea-430f-97ad-c49f7878007b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501190-785d5"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.638235 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/209646e7-0d11-4f45-9222-ca6ac4d92268-mountpoint-dir\") pod \"csi-hostpathplugin-gn7kw\" (UID: \"209646e7-0d11-4f45-9222-ca6ac4d92268\") " pod="hostpath-provisioner/csi-hostpathplugin-gn7kw"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.638641 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/fa9699fd-2f6b-460d-9ca7-95ebba8a5d72-srv-cert\") pod \"catalog-operator-68c6474976-gv5dm\" (UID: \"fa9699fd-2f6b-460d-9ca7-95ebba8a5d72\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gv5dm"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.638676 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/467ef27d-8f51-4317-80ee-9071d7024f86-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hnz5f\" (UID: \"467ef27d-8f51-4317-80ee-9071d7024f86\") " pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.639458 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48qwv\" (UniqueName: \"kubernetes.io/projected/322e7205-53dc-4148-b16b-5989623d4cb6-kube-api-access-48qwv\") pod \"service-ca-operator-777779d784-lwzgj\" (UID: \"322e7205-53dc-4148-b16b-5989623d4cb6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lwzgj"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.639488 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/467ef27d-8f51-4317-80ee-9071d7024f86-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hnz5f\" (UID: \"467ef27d-8f51-4317-80ee-9071d7024f86\") " pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.639590 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-registry-tls\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.642418 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d61538e7-e186-4a77-a96a-9d4d9d514837-service-ca-bundle\") pod \"router-default-5444994796-2x78p\" (UID: \"d61538e7-e186-4a77-a96a-9d4d9d514837\") " pod="openshift-ingress/router-default-5444994796-2x78p"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.643458 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzbl2\" (UniqueName: \"kubernetes.io/projected/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-kube-api-access-qzbl2\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.643513 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdvtl\" (UniqueName: \"kubernetes.io/projected/411c26b8-a425-41a4-b05c-3bdbd49f6e69-kube-api-access-xdvtl\") pod \"machine-config-server-6t4wf\" (UID: \"411c26b8-a425-41a4-b05c-3bdbd49f6e69\") " pod="openshift-machine-config-operator/machine-config-server-6t4wf"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.643542 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhqzb\" (UniqueName: \"kubernetes.io/projected/96042d4b-00db-43c7-9f94-203fa722a690-kube-api-access-xhqzb\") pod \"ingress-canary-vdz28\" (UID: \"96042d4b-00db-43c7-9f94-203fa722a690\") " pod="openshift-ingress-canary/ingress-canary-vdz28"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.643560 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-trusted-ca\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.645275 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-installation-pull-secrets\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.645769 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-trusted-ca\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.646398 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-registry-tls\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.651768 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97"]
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.666531 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-7qkqs"]
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.670255 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-bound-sa-token\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.708784 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzbl2\" (UniqueName: \"kubernetes.io/projected/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-kube-api-access-qzbl2\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.728443 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-n987n"]
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.746289 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.746503 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhqzb\" (UniqueName: \"kubernetes.io/projected/96042d4b-00db-43c7-9f94-203fa722a690-kube-api-access-xhqzb\") pod \"ingress-canary-vdz28\" (UID: \"96042d4b-00db-43c7-9f94-203fa722a690\") " pod="openshift-ingress-canary/ingress-canary-vdz28"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.746544 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdvtl\" (UniqueName: \"kubernetes.io/projected/411c26b8-a425-41a4-b05c-3bdbd49f6e69-kube-api-access-xdvtl\") pod \"machine-config-server-6t4wf\" (UID: \"411c26b8-a425-41a4-b05c-3bdbd49f6e69\") " pod="openshift-machine-config-operator/machine-config-server-6t4wf"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.746564 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/209646e7-0d11-4f45-9222-ca6ac4d92268-registration-dir\") pod \"csi-hostpathplugin-gn7kw\" (UID: \"209646e7-0d11-4f45-9222-ca6ac4d92268\") " pod="hostpath-provisioner/csi-hostpathplugin-gn7kw"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.746584 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ac249dca-286c-4d91-8782-2d3b0676dc68-proxy-tls\") pod \"machine-config-controller-84d6567774-4n2db\" (UID: \"ac249dca-286c-4d91-8782-2d3b0676dc68\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4n2db"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.746618 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdlsw\" (UniqueName: \"kubernetes.io/projected/099069af-444c-4cc1-8c7f-786a4a64aedb-kube-api-access-qdlsw\") pod \"machine-config-operator-74547568cd-69tbt\" (UID: \"099069af-444c-4cc1-8c7f-786a4a64aedb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69tbt"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.746642 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ca9ec072-2c92-4631-8c7e-1d5b59f85076-webhook-cert\") pod \"packageserver-d55dfcdfc-sjx8j\" (UID: \"ca9ec072-2c92-4631-8c7e-1d5b59f85076\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.746658 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/209646e7-0d11-4f45-9222-ca6ac4d92268-plugins-dir\") pod \"csi-hostpathplugin-gn7kw\" (UID: \"209646e7-0d11-4f45-9222-ca6ac4d92268\") " pod="hostpath-provisioner/csi-hostpathplugin-gn7kw"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.746673 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/209646e7-0d11-4f45-9222-ca6ac4d92268-csi-data-dir\") pod \"csi-hostpathplugin-gn7kw\" (UID: \"209646e7-0d11-4f45-9222-ca6ac4d92268\") " pod="hostpath-provisioner/csi-hostpathplugin-gn7kw"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.746703 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b45fccf-2dc0-4fab-8a28-76127a690f13-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-zw2ht\" (UID: \"1b45fccf-2dc0-4fab-8a28-76127a690f13\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw2ht"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.746721 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgrl6\" (UniqueName: \"kubernetes.io/projected/8f5a7aaf-90b0-4895-9d69-0b5df2ae76d1-kube-api-access-rgrl6\") pod \"service-ca-9c57cc56f-dgrbx\" (UID: \"8f5a7aaf-90b0-4895-9d69-0b5df2ae76d1\") " pod="openshift-service-ca/service-ca-9c57cc56f-dgrbx"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.746754 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/fa9699fd-2f6b-460d-9ca7-95ebba8a5d72-profile-collector-cert\") pod \"catalog-operator-68c6474976-gv5dm\" (UID: \"fa9699fd-2f6b-460d-9ca7-95ebba8a5d72\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gv5dm"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.746782 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/8f5a7aaf-90b0-4895-9d69-0b5df2ae76d1-signing-key\") pod \"service-ca-9c57cc56f-dgrbx\" (UID: \"8f5a7aaf-90b0-4895-9d69-0b5df2ae76d1\") " pod="openshift-service-ca/service-ca-9c57cc56f-dgrbx"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.746798 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/439a2e89-139f-4356-ac5f-6325ac0c2a92-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-lkd4r\" (UID: \"439a2e89-139f-4356-ac5f-6325ac0c2a92\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-lkd4r"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.746832 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/d61538e7-e186-4a77-a96a-9d4d9d514837-default-certificate\") pod \"router-default-5444994796-2x78p\" (UID: \"d61538e7-e186-4a77-a96a-9d4d9d514837\") " pod="openshift-ingress/router-default-5444994796-2x78p"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.746854 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/f6b1859b-cd07-469f-be86-e691a81d5b85-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-h2gd9\" (UID: \"f6b1859b-cd07-469f-be86-e691a81d5b85\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h2gd9"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.746872 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/099069af-444c-4cc1-8c7f-786a4a64aedb-proxy-tls\") pod \"machine-config-operator-74547568cd-69tbt\" (UID: \"099069af-444c-4cc1-8c7f-786a4a64aedb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69tbt"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.746912 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbvtz\" (UniqueName: \"kubernetes.io/projected/f6b1859b-cd07-469f-be86-e691a81d5b85-kube-api-access-jbvtz\") pod \"control-plane-machine-set-operator-78cbb6b69f-h2gd9\" (UID: \"f6b1859b-cd07-469f-be86-e691a81d5b85\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h2gd9"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.746929 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/47a9f45a-7c7f-41fb-b112-6e43666abe9b-metrics-tls\") pod \"dns-default-4dnjt\" (UID: \"47a9f45a-7c7f-41fb-b112-6e43666abe9b\") " pod="openshift-dns/dns-default-4dnjt"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.746943 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/411c26b8-a425-41a4-b05c-3bdbd49f6e69-certs\") pod \"machine-config-server-6t4wf\" (UID: \"411c26b8-a425-41a4-b05c-3bdbd49f6e69\") " pod="openshift-machine-config-operator/machine-config-server-6t4wf"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.746959 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/322e7205-53dc-4148-b16b-5989623d4cb6-config\") pod \"service-ca-operator-777779d784-lwzgj\" (UID: \"322e7205-53dc-4148-b16b-5989623d4cb6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lwzgj"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747031 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/411c26b8-a425-41a4-b05c-3bdbd49f6e69-node-bootstrap-token\") pod \"machine-config-server-6t4wf\" (UID: \"411c26b8-a425-41a4-b05c-3bdbd49f6e69\") " pod="openshift-machine-config-operator/machine-config-server-6t4wf"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747047 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59fvt\" (UniqueName: \"kubernetes.io/projected/1b45fccf-2dc0-4fab-8a28-76127a690f13-kube-api-access-59fvt\") pod \"kube-storage-version-migrator-operator-b67b599dd-zw2ht\" (UID: \"1b45fccf-2dc0-4fab-8a28-76127a690f13\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw2ht"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747063 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/099069af-444c-4cc1-8c7f-786a4a64aedb-auth-proxy-config\") pod \"machine-config-operator-74547568cd-69tbt\" (UID: \"099069af-444c-4cc1-8c7f-786a4a64aedb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69tbt"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747078 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b45fccf-2dc0-4fab-8a28-76127a690f13-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-zw2ht\" (UID: \"1b45fccf-2dc0-4fab-8a28-76127a690f13\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw2ht"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747108 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jt8x\" (UniqueName: \"kubernetes.io/projected/47a9f45a-7c7f-41fb-b112-6e43666abe9b-kube-api-access-8jt8x\") pod \"dns-default-4dnjt\" (UID: \"47a9f45a-7c7f-41fb-b112-6e43666abe9b\") " pod="openshift-dns/dns-default-4dnjt"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747145 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbkkr\" (UniqueName: \"kubernetes.io/projected/56ca6971-8e8a-485d-886d-f5c8eadef3de-kube-api-access-cbkkr\") pod \"openshift-config-operator-7777fb866f-5hjgs\" (UID: \"56ca6971-8e8a-485d-886d-f5c8eadef3de\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5hjgs"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747158 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkbh4\" (UniqueName: \"kubernetes.io/projected/439a2e89-139f-4356-ac5f-6325ac0c2a92-kube-api-access-fkbh4\") pod \"multus-admission-controller-857f4d67dd-lkd4r\" (UID: \"439a2e89-139f-4356-ac5f-6325ac0c2a92\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-lkd4r"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747190 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ac249dca-286c-4d91-8782-2d3b0676dc68-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-4n2db\" (UID: \"ac249dca-286c-4d91-8782-2d3b0676dc68\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4n2db"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747214 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6r9j\" (UniqueName: \"kubernetes.io/projected/2d480358-faea-430f-97ad-c49f7878007b-kube-api-access-n6r9j\") pod \"collect-profiles-29501190-785d5\" (UID: \"2d480358-faea-430f-97ad-c49f7878007b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501190-785d5"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747228 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/209646e7-0d11-4f45-9222-ca6ac4d92268-socket-dir\") pod \"csi-hostpathplugin-gn7kw\" (UID: \"209646e7-0d11-4f45-9222-ca6ac4d92268\") " pod="hostpath-provisioner/csi-hostpathplugin-gn7kw"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747256 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/ca9ec072-2c92-4631-8c7e-1d5b59f85076-tmpfs\") pod \"packageserver-d55dfcdfc-sjx8j\" (UID: \"ca9ec072-2c92-4631-8c7e-1d5b59f85076\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747273 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/d61538e7-e186-4a77-a96a-9d4d9d514837-stats-auth\") pod \"router-default-5444994796-2x78p\" (UID: \"d61538e7-e186-4a77-a96a-9d4d9d514837\") " pod="openshift-ingress/router-default-5444994796-2x78p"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747288 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/60190c83-51d0-42d2-985b-6f790587622e-srv-cert\") pod \"olm-operator-6b444d44fb-hg7t4\" (UID: \"60190c83-51d0-42d2-985b-6f790587622e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hg7t4"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747302 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsdgw\" (UniqueName: \"kubernetes.io/projected/fa9699fd-2f6b-460d-9ca7-95ebba8a5d72-kube-api-access-zsdgw\") pod \"catalog-operator-68c6474976-gv5dm\" (UID: \"fa9699fd-2f6b-460d-9ca7-95ebba8a5d72\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gv5dm"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747332 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/8f5a7aaf-90b0-4895-9d69-0b5df2ae76d1-signing-cabundle\") pod \"service-ca-9c57cc56f-dgrbx\" (UID: \"8f5a7aaf-90b0-4895-9d69-0b5df2ae76d1\") " pod="openshift-service-ca/service-ca-9c57cc56f-dgrbx"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747347 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/099069af-444c-4cc1-8c7f-786a4a64aedb-images\") pod \"machine-config-operator-74547568cd-69tbt\" (UID: \"099069af-444c-4cc1-8c7f-786a4a64aedb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69tbt"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747361 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-455qd\" (UniqueName: \"kubernetes.io/projected/d61538e7-e186-4a77-a96a-9d4d9d514837-kube-api-access-455qd\") pod \"router-default-5444994796-2x78p\" (UID: \"d61538e7-e186-4a77-a96a-9d4d9d514837\") " pod="openshift-ingress/router-default-5444994796-2x78p"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747386 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7n7fz\" (UniqueName: \"kubernetes.io/projected/60190c83-51d0-42d2-985b-6f790587622e-kube-api-access-7n7fz\") pod \"olm-operator-6b444d44fb-hg7t4\" (UID: \"60190c83-51d0-42d2-985b-6f790587622e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hg7t4"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747417 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/0aec75ad-50ac-47c3-ad95-efd5b76ed561-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-kc7j8\" (UID: \"0aec75ad-50ac-47c3-ad95-efd5b76ed561\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kc7j8"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747439 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/47a9f45a-7c7f-41fb-b112-6e43666abe9b-config-volume\") pod \"dns-default-4dnjt\" (UID: \"47a9f45a-7c7f-41fb-b112-6e43666abe9b\") " pod="openshift-dns/dns-default-4dnjt"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747453 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/322e7205-53dc-4148-b16b-5989623d4cb6-serving-cert\") pod \"service-ca-operator-777779d784-lwzgj\" (UID: \"322e7205-53dc-4148-b16b-5989623d4cb6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lwzgj"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747468 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dl6rv\" (UniqueName: \"kubernetes.io/projected/467ef27d-8f51-4317-80ee-9071d7024f86-kube-api-access-dl6rv\") pod \"marketplace-operator-79b997595-hnz5f\" (UID: \"467ef27d-8f51-4317-80ee-9071d7024f86\") " pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747501 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/56ca6971-8e8a-485d-886d-f5c8eadef3de-available-featuregates\") pod \"openshift-config-operator-7777fb866f-5hjgs\" (UID: \"56ca6971-8e8a-485d-886d-f5c8eadef3de\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5hjgs"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747517 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcrk8\" (UniqueName: \"kubernetes.io/projected/209646e7-0d11-4f45-9222-ca6ac4d92268-kube-api-access-dcrk8\") pod \"csi-hostpathplugin-gn7kw\" (UID: \"209646e7-0d11-4f45-9222-ca6ac4d92268\") " pod="hostpath-provisioner/csi-hostpathplugin-gn7kw"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747533 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9q42c\" (UniqueName: \"kubernetes.io/projected/0aec75ad-50ac-47c3-ad95-efd5b76ed561-kube-api-access-9q42c\") pod \"package-server-manager-789f6589d5-kc7j8\" (UID: \"0aec75ad-50ac-47c3-ad95-efd5b76ed561\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kc7j8"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747565 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6gsz\" (UniqueName: \"kubernetes.io/projected/ca9ec072-2c92-4631-8c7e-1d5b59f85076-kube-api-access-m6gsz\") pod \"packageserver-d55dfcdfc-sjx8j\" (UID: \"ca9ec072-2c92-4631-8c7e-1d5b59f85076\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747581 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/96042d4b-00db-43c7-9f94-203fa722a690-cert\") pod \"ingress-canary-vdz28\" (UID: \"96042d4b-00db-43c7-9f94-203fa722a690\") " pod="openshift-ingress-canary/ingress-canary-vdz28"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747595 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/60190c83-51d0-42d2-985b-6f790587622e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hg7t4\" (UID: \"60190c83-51d0-42d2-985b-6f790587622e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hg7t4"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747609 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2d480358-faea-430f-97ad-c49f7878007b-config-volume\") pod \"collect-profiles-29501190-785d5\" (UID: \"2d480358-faea-430f-97ad-c49f7878007b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501190-785d5"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747624 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4p9r\" (UniqueName: \"kubernetes.io/projected/ac249dca-286c-4d91-8782-2d3b0676dc68-kube-api-access-z4p9r\") pod \"machine-config-controller-84d6567774-4n2db\" (UID: \"ac249dca-286c-4d91-8782-2d3b0676dc68\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4n2db"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747656 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/56ca6971-8e8a-485d-886d-f5c8eadef3de-serving-cert\") pod \"openshift-config-operator-7777fb866f-5hjgs\" (UID: \"56ca6971-8e8a-485d-886d-f5c8eadef3de\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5hjgs"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747672 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2d480358-faea-430f-97ad-c49f7878007b-secret-volume\") pod \"collect-profiles-29501190-785d5\" (UID: \"2d480358-faea-430f-97ad-c49f7878007b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501190-785d5"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747698 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/209646e7-0d11-4f45-9222-ca6ac4d92268-mountpoint-dir\") pod \"csi-hostpathplugin-gn7kw\" (UID: \"209646e7-0d11-4f45-9222-ca6ac4d92268\") " pod="hostpath-provisioner/csi-hostpathplugin-gn7kw"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747755 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d61538e7-e186-4a77-a96a-9d4d9d514837-metrics-certs\") pod \"router-default-5444994796-2x78p\" (UID: \"d61538e7-e186-4a77-a96a-9d4d9d514837\") " pod="openshift-ingress/router-default-5444994796-2x78p"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747772 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ca9ec072-2c92-4631-8c7e-1d5b59f85076-apiservice-cert\") pod \"packageserver-d55dfcdfc-sjx8j\" (UID: \"ca9ec072-2c92-4631-8c7e-1d5b59f85076\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747811 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/467ef27d-8f51-4317-80ee-9071d7024f86-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hnz5f\" (UID: \"467ef27d-8f51-4317-80ee-9071d7024f86\") " pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747827 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/fa9699fd-2f6b-460d-9ca7-95ebba8a5d72-srv-cert\") pod \"catalog-operator-68c6474976-gv5dm\" (UID: \"fa9699fd-2f6b-460d-9ca7-95ebba8a5d72\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gv5dm"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747846 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48qwv\" (UniqueName: \"kubernetes.io/projected/322e7205-53dc-4148-b16b-5989623d4cb6-kube-api-access-48qwv\") pod \"service-ca-operator-777779d784-lwzgj\" (UID: \"322e7205-53dc-4148-b16b-5989623d4cb6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lwzgj"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747876 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/467ef27d-8f51-4317-80ee-9071d7024f86-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hnz5f\" (UID: \"467ef27d-8f51-4317-80ee-9071d7024f86\") " pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.747896 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d61538e7-e186-4a77-a96a-9d4d9d514837-service-ca-bundle\") pod \"router-default-5444994796-2x78p\" (UID: \"d61538e7-e186-4a77-a96a-9d4d9d514837\") " pod="openshift-ingress/router-default-5444994796-2x78p"
Feb 02 22:36:36 crc kubenswrapper[4755]: E0202 22:36:36.748020 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:37.248002402 +0000 UTC m=+152.939222728 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.748286 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/209646e7-0d11-4f45-9222-ca6ac4d92268-registration-dir\") pod \"csi-hostpathplugin-gn7kw\" (UID: \"209646e7-0d11-4f45-9222-ca6ac4d92268\") " pod="hostpath-provisioner/csi-hostpathplugin-gn7kw"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.748897 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/56ca6971-8e8a-485d-886d-f5c8eadef3de-available-featuregates\") pod \"openshift-config-operator-7777fb866f-5hjgs\" (UID: \"56ca6971-8e8a-485d-886d-f5c8eadef3de\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5hjgs"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.749521 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ac249dca-286c-4d91-8782-2d3b0676dc68-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-4n2db\" (UID: \"ac249dca-286c-4d91-8782-2d3b0676dc68\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4n2db"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.749751 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/209646e7-0d11-4f45-9222-ca6ac4d92268-socket-dir\") pod \"csi-hostpathplugin-gn7kw\" (UID: \"209646e7-0d11-4f45-9222-ca6ac4d92268\") " pod="hostpath-provisioner/csi-hostpathplugin-gn7kw"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.750092 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d61538e7-e186-4a77-a96a-9d4d9d514837-service-ca-bundle\") pod \"router-default-5444994796-2x78p\" (UID: \"d61538e7-e186-4a77-a96a-9d4d9d514837\") " pod="openshift-ingress/router-default-5444994796-2x78p"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.750179 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/ca9ec072-2c92-4631-8c7e-1d5b59f85076-tmpfs\") pod \"packageserver-d55dfcdfc-sjx8j\" (UID: \"ca9ec072-2c92-4631-8c7e-1d5b59f85076\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.753771 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/209646e7-0d11-4f45-9222-ca6ac4d92268-plugins-dir\") pod \"csi-hostpathplugin-gn7kw\" (UID: \"209646e7-0d11-4f45-9222-ca6ac4d92268\") " pod="hostpath-provisioner/csi-hostpathplugin-gn7kw"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.753851 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/209646e7-0d11-4f45-9222-ca6ac4d92268-csi-data-dir\") pod \"csi-hostpathplugin-gn7kw\" (UID: \"209646e7-0d11-4f45-9222-ca6ac4d92268\") " pod="hostpath-provisioner/csi-hostpathplugin-gn7kw"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.754924 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/099069af-444c-4cc1-8c7f-786a4a64aedb-auth-proxy-config\") pod \"machine-config-operator-74547568cd-69tbt\" (UID: \"099069af-444c-4cc1-8c7f-786a4a64aedb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69tbt"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.755154 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/8f5a7aaf-90b0-4895-9d69-0b5df2ae76d1-signing-cabundle\") pod \"service-ca-9c57cc56f-dgrbx\" (UID: \"8f5a7aaf-90b0-4895-9d69-0b5df2ae76d1\") " pod="openshift-service-ca/service-ca-9c57cc56f-dgrbx"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.755665 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b45fccf-2dc0-4fab-8a28-76127a690f13-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-zw2ht\" (UID: \"1b45fccf-2dc0-4fab-8a28-76127a690f13\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw2ht"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.760212 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2d480358-faea-430f-97ad-c49f7878007b-secret-volume\") pod \"collect-profiles-29501190-785d5\" (UID: \"2d480358-faea-430f-97ad-c49f7878007b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501190-785d5"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.761869 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/467ef27d-8f51-4317-80ee-9071d7024f86-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hnz5f\" (UID: \"467ef27d-8f51-4317-80ee-9071d7024f86\") " pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.762327 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ca9ec072-2c92-4631-8c7e-1d5b59f85076-apiservice-cert\") pod \"packageserver-d55dfcdfc-sjx8j\" (UID: \"ca9ec072-2c92-4631-8c7e-1d5b59f85076\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.762497 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/322e7205-53dc-4148-b16b-5989623d4cb6-config\") pod \"service-ca-operator-777779d784-lwzgj\" (UID: \"322e7205-53dc-4148-b16b-5989623d4cb6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lwzgj"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.762884 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/56ca6971-8e8a-485d-886d-f5c8eadef3de-serving-cert\") pod \"openshift-config-operator-7777fb866f-5hjgs\" (UID: \"56ca6971-8e8a-485d-886d-f5c8eadef3de\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5hjgs"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.762938 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/209646e7-0d11-4f45-9222-ca6ac4d92268-mountpoint-dir\") pod \"csi-hostpathplugin-gn7kw\" (UID: \"209646e7-0d11-4f45-9222-ca6ac4d92268\") " pod="hostpath-provisioner/csi-hostpathplugin-gn7kw"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.763402 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ca9ec072-2c92-4631-8c7e-1d5b59f85076-webhook-cert\") pod \"packageserver-d55dfcdfc-sjx8j\" (UID: \"ca9ec072-2c92-4631-8c7e-1d5b59f85076\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.764090 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ac249dca-286c-4d91-8782-2d3b0676dc68-proxy-tls\") pod \"machine-config-controller-84d6567774-4n2db\" (UID: \"ac249dca-286c-4d91-8782-2d3b0676dc68\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4n2db"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.764108 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/f6b1859b-cd07-469f-be86-e691a81d5b85-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-h2gd9\" (UID: \"f6b1859b-cd07-469f-be86-e691a81d5b85\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h2gd9"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.764265 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/47a9f45a-7c7f-41fb-b112-6e43666abe9b-config-volume\") pod \"dns-default-4dnjt\" (UID: \"47a9f45a-7c7f-41fb-b112-6e43666abe9b\") " pod="openshift-dns/dns-default-4dnjt"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.764838 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/467ef27d-8f51-4317-80ee-9071d7024f86-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hnz5f\" (UID: \"467ef27d-8f51-4317-80ee-9071d7024f86\") " pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.764885 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/099069af-444c-4cc1-8c7f-786a4a64aedb-proxy-tls\") pod \"machine-config-operator-74547568cd-69tbt\" (UID: \"099069af-444c-4cc1-8c7f-786a4a64aedb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69tbt"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.765475 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/411c26b8-a425-41a4-b05c-3bdbd49f6e69-node-bootstrap-token\") pod \"machine-config-server-6t4wf\" (UID: \"411c26b8-a425-41a4-b05c-3bdbd49f6e69\") " pod="openshift-machine-config-operator/machine-config-server-6t4wf"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.766289 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/47a9f45a-7c7f-41fb-b112-6e43666abe9b-metrics-tls\") pod \"dns-default-4dnjt\" (UID: \"47a9f45a-7c7f-41fb-b112-6e43666abe9b\") " pod="openshift-dns/dns-default-4dnjt"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.766396 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/411c26b8-a425-41a4-b05c-3bdbd49f6e69-certs\") pod \"machine-config-server-6t4wf\" (UID: \"411c26b8-a425-41a4-b05c-3bdbd49f6e69\") " pod="openshift-machine-config-operator/machine-config-server-6t4wf"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.766568 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/60190c83-51d0-42d2-985b-6f790587622e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hg7t4\" (UID: \"60190c83-51d0-42d2-985b-6f790587622e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hg7t4"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.766667 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/d61538e7-e186-4a77-a96a-9d4d9d514837-stats-auth\") pod \"router-default-5444994796-2x78p\" (UID: \"d61538e7-e186-4a77-a96a-9d4d9d514837\") " pod="openshift-ingress/router-default-5444994796-2x78p"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.766882 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/d61538e7-e186-4a77-a96a-9d4d9d514837-default-certificate\") pod \"router-default-5444994796-2x78p\" (UID: \"d61538e7-e186-4a77-a96a-9d4d9d514837\") " pod="openshift-ingress/router-default-5444994796-2x78p"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.766895 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2d480358-faea-430f-97ad-c49f7878007b-config-volume\") pod \"collect-profiles-29501190-785d5\" (UID: \"2d480358-faea-430f-97ad-c49f7878007b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501190-785d5"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.767161 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/96042d4b-00db-43c7-9f94-203fa722a690-cert\") pod \"ingress-canary-vdz28\" (UID: \"96042d4b-00db-43c7-9f94-203fa722a690\") " pod="openshift-ingress-canary/ingress-canary-vdz28"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.773259 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/60190c83-51d0-42d2-985b-6f790587622e-srv-cert\") pod \"olm-operator-6b444d44fb-hg7t4\" (UID: \"60190c83-51d0-42d2-985b-6f790587622e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hg7t4"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.773947 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/fa9699fd-2f6b-460d-9ca7-95ebba8a5d72-srv-cert\") pod \"catalog-operator-68c6474976-gv5dm\" (UID: \"fa9699fd-2f6b-460d-9ca7-95ebba8a5d72\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gv5dm"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.775138 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/439a2e89-139f-4356-ac5f-6325ac0c2a92-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-lkd4r\" (UID: \"439a2e89-139f-4356-ac5f-6325ac0c2a92\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-lkd4r"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.775964 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/0aec75ad-50ac-47c3-ad95-efd5b76ed561-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-kc7j8\" (UID: \"0aec75ad-50ac-47c3-ad95-efd5b76ed561\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kc7j8"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.776584 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/099069af-444c-4cc1-8c7f-786a4a64aedb-images\") pod \"machine-config-operator-74547568cd-69tbt\" (UID: \"099069af-444c-4cc1-8c7f-786a4a64aedb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69tbt"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.776852 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d61538e7-e186-4a77-a96a-9d4d9d514837-metrics-certs\") pod \"router-default-5444994796-2x78p\" (UID: \"d61538e7-e186-4a77-a96a-9d4d9d514837\") " pod="openshift-ingress/router-default-5444994796-2x78p"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.777092 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/fa9699fd-2f6b-460d-9ca7-95ebba8a5d72-profile-collector-cert\") pod \"catalog-operator-68c6474976-gv5dm\" (UID: \"fa9699fd-2f6b-460d-9ca7-95ebba8a5d72\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gv5dm"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.780360 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/8f5a7aaf-90b0-4895-9d69-0b5df2ae76d1-signing-key\") pod \"service-ca-9c57cc56f-dgrbx\" (UID: \"8f5a7aaf-90b0-4895-9d69-0b5df2ae76d1\") " pod="openshift-service-ca/service-ca-9c57cc56f-dgrbx"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.783965 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b45fccf-2dc0-4fab-8a28-76127a690f13-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-zw2ht\" (UID: \"1b45fccf-2dc0-4fab-8a28-76127a690f13\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw2ht"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.785045 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/322e7205-53dc-4148-b16b-5989623d4cb6-serving-cert\") pod \"service-ca-operator-777779d784-lwzgj\" (UID: \"322e7205-53dc-4148-b16b-5989623d4cb6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lwzgj"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.788350 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkbh4\" (UniqueName: \"kubernetes.io/projected/439a2e89-139f-4356-ac5f-6325ac0c2a92-kube-api-access-fkbh4\") pod \"multus-admission-controller-857f4d67dd-lkd4r\" (UID: \"439a2e89-139f-4356-ac5f-6325ac0c2a92\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-lkd4r"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.802863 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bwhck"]
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.814233 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-s9mjc"]
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.834785 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-lkd4r"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.841472 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhqzb\" (UniqueName: \"kubernetes.io/projected/96042d4b-00db-43c7-9f94-203fa722a690-kube-api-access-xhqzb\") pod \"ingress-canary-vdz28\" (UID: \"96042d4b-00db-43c7-9f94-203fa722a690\") " pod="openshift-ingress-canary/ingress-canary-vdz28"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.842788 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdvtl\" (UniqueName: \"kubernetes.io/projected/411c26b8-a425-41a4-b05c-3bdbd49f6e69-kube-api-access-xdvtl\") pod \"machine-config-server-6t4wf\" (UID: \"411c26b8-a425-41a4-b05c-3bdbd49f6e69\") " pod="openshift-machine-config-operator/machine-config-server-6t4wf"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.846145 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-rwshx"]
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.849904 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.850006 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6r9j\" (UniqueName: \"kubernetes.io/projected/2d480358-faea-430f-97ad-c49f7878007b-kube-api-access-n6r9j\") pod \"collect-profiles-29501190-785d5\" (UID: \"2d480358-faea-430f-97ad-c49f7878007b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501190-785d5"
Feb 02 22:36:36 crc kubenswrapper[4755]: E0202 22:36:36.850250 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:37.350237735 +0000 UTC m=+153.041458061 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.860460 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6t4wf"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.867804 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgrl6\" (UniqueName: \"kubernetes.io/projected/8f5a7aaf-90b0-4895-9d69-0b5df2ae76d1-kube-api-access-rgrl6\") pod \"service-ca-9c57cc56f-dgrbx\" (UID: \"8f5a7aaf-90b0-4895-9d69-0b5df2ae76d1\") " pod="openshift-service-ca/service-ca-9c57cc56f-dgrbx"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.889470 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9q42c\" (UniqueName: \"kubernetes.io/projected/0aec75ad-50ac-47c3-ad95-efd5b76ed561-kube-api-access-9q42c\") pod \"package-server-manager-789f6589d5-kc7j8\" (UID: \"0aec75ad-50ac-47c3-ad95-efd5b76ed561\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kc7j8"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.906528 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcrk8\" (UniqueName: \"kubernetes.io/projected/209646e7-0d11-4f45-9222-ca6ac4d92268-kube-api-access-dcrk8\") pod \"csi-hostpathplugin-gn7kw\" (UID: \"209646e7-0d11-4f45-9222-ca6ac4d92268\") " pod="hostpath-provisioner/csi-hostpathplugin-gn7kw"
Feb 02 22:36:36 crc kubenswrapper[4755]: W0202 22:36:36.918686 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod411c26b8_a425_41a4_b05c_3bdbd49f6e69.slice/crio-d365fe9803aea2209d258db1696d9132cd7dd1dfd3be2cfc8589102de176f96d WatchSource:0}: Error finding container d365fe9803aea2209d258db1696d9132cd7dd1dfd3be2cfc8589102de176f96d: Status 404 returned error can't find the container with id d365fe9803aea2209d258db1696d9132cd7dd1dfd3be2cfc8589102de176f96d
Feb 02 22:36:36 crc kubenswrapper[4755]: W0202 22:36:36.918993 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1812beb5_4070_4649_8b5b_e78e5257c3dc.slice/crio-a4555d48ebc6c5c6d5c6ba6c5cfd1df00dace8eb38dfb39a83bcb34bcd0d381d WatchSource:0}: Error finding container a4555d48ebc6c5c6d5c6ba6c5cfd1df00dace8eb38dfb39a83bcb34bcd0d381d: Status 404 returned error can't find the container with id a4555d48ebc6c5c6d5c6ba6c5cfd1df00dace8eb38dfb39a83bcb34bcd0d381d
Feb 02 22:36:36 crc kubenswrapper[4755]: W0202 22:36:36.932751 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0124b915_2ac4_4be7_b356_bf78a8295d9d.slice/crio-0e49a6d6b04d60ab110958c19cbec811b81e0d7e3b7c6dc461753113ace371e1 WatchSource:0}: Error finding container 0e49a6d6b04d60ab110958c19cbec811b81e0d7e3b7c6dc461753113ace371e1: Status 404 returned error can't find the container with id 0e49a6d6b04d60ab110958c19cbec811b81e0d7e3b7c6dc461753113ace371e1
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.950211 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6gsz\" (UniqueName: \"kubernetes.io/projected/ca9ec072-2c92-4631-8c7e-1d5b59f85076-kube-api-access-m6gsz\") pod \"packageserver-d55dfcdfc-sjx8j\" (UID: \"ca9ec072-2c92-4631-8c7e-1d5b59f85076\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.950459 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:36 crc kubenswrapper[4755]: E0202 22:36:36.950856 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:37.450809031 +0000 UTC m=+153.142029357 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.952528 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdlsw\" (UniqueName: \"kubernetes.io/projected/099069af-444c-4cc1-8c7f-786a4a64aedb-kube-api-access-qdlsw\") pod \"machine-config-operator-74547568cd-69tbt\" (UID: \"099069af-444c-4cc1-8c7f-786a4a64aedb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69tbt"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.964388 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fh5rw" event={"ID":"49503265-4d0e-484f-8832-38be62c19af0","Type":"ContainerStarted","Data":"1c3b385e7b70c7dd619e83db317f48b2f2fb901552a79fa846c9e9d104013e66"}
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.964438 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fh5rw" event={"ID":"49503265-4d0e-484f-8832-38be62c19af0","Type":"ContainerStarted","Data":"f7094ee0623b7acedbaeff237e0fa5a6c615e9cc5a021a275747940ad3c343bd"}
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.968419 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bwhck" event={"ID":"21a3e3ee-831b-450a-b0da-13551b7353e4","Type":"ContainerStarted","Data":"27abd2423a1c1d1f4e8873d386d69768d15e528f9c7774a2d02280779cb7aa2c"}
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.972594 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx" event={"ID":"fbc07c99-ae2c-459e-9731-ca524c8bfa08","Type":"ContainerStarted","Data":"a151fab0c988d424ed0d48f166e5abd543b8158db33024ca22de1a7eee5d49a7"}
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.972650 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx" event={"ID":"fbc07c99-ae2c-459e-9731-ca524c8bfa08","Type":"ContainerStarted","Data":"37936b9f7ff47dcc3ccf4440660de8629720b7ccd6c8f088f44de3293f748ea6"}
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.973072 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.975461 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97" event={"ID":"19316443-6a89-4993-b196-1de2bece6e84","Type":"ContainerStarted","Data":"73ebd57e1dcbc0b381d5d2b7c7d7baebecf81abc9f6c4bc68214a336854f10ba"}
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.975838 4755 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-m8cdx container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body=
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.975873 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx" podUID="fbc07c99-ae2c-459e-9731-ca524c8bfa08" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.977807 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48qwv\" (UniqueName: \"kubernetes.io/projected/322e7205-53dc-4148-b16b-5989623d4cb6-kube-api-access-48qwv\") pod \"service-ca-operator-777779d784-lwzgj\" (UID: \"322e7205-53dc-4148-b16b-5989623d4cb6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lwzgj"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.979003 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-6t4wf" event={"ID":"411c26b8-a425-41a4-b05c-3bdbd49f6e69","Type":"ContainerStarted","Data":"d365fe9803aea2209d258db1696d9132cd7dd1dfd3be2cfc8589102de176f96d"}
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.982721 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-s9mjc" event={"ID":"1812beb5-4070-4649-8b5b-e78e5257c3dc","Type":"ContainerStarted","Data":"a4555d48ebc6c5c6d5c6ba6c5cfd1df00dace8eb38dfb39a83bcb34bcd0d381d"}
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.987811 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-n987n" event={"ID":"a6f21874-ee8f-4718-b2ab-8b4a97543364","Type":"ContainerStarted","Data":"71df3162ffe454d773481f365763434233374fedabd9b3f3c83e2824d6ec1721"}
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.989999 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59fvt\" (UniqueName: \"kubernetes.io/projected/1b45fccf-2dc0-4fab-8a28-76127a690f13-kube-api-access-59fvt\") pod \"kube-storage-version-migrator-operator-b67b599dd-zw2ht\" (UID: \"1b45fccf-2dc0-4fab-8a28-76127a690f13\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw2ht"
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.995139 4755 generic.go:334] "Generic (PLEG): container finished" podID="1c3ea74e-377e-4d51-b82a-33e8ea14cf59" containerID="f14516959e0d4560fd8f632e1b8fa63c6e21e1d7fd0eea56835cf9b1f31e60d2" exitCode=0
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.995305 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" event={"ID":"1c3ea74e-377e-4d51-b82a-33e8ea14cf59","Type":"ContainerDied","Data":"f14516959e0d4560fd8f632e1b8fa63c6e21e1d7fd0eea56835cf9b1f31e60d2"}
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.995328 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" event={"ID":"1c3ea74e-377e-4d51-b82a-33e8ea14cf59","Type":"ContainerStarted","Data":"bccff0c8bcde899069287960e1319b6e6848590d09bf644dde57ea138a6af6e2"}
Feb 02 22:36:36 crc kubenswrapper[4755]: I0202 22:36:36.997928 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-rwshx" event={"ID":"0124b915-2ac4-4be7-b356-bf78a8295d9d","Type":"ContainerStarted","Data":"0e49a6d6b04d60ab110958c19cbec811b81e0d7e3b7c6dc461753113ace371e1"}
Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.002065 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-7qkqs" event={"ID":"4730daad-9b5f-4e27-a9d2-8a989d2c40e8","Type":"ContainerStarted","Data":"6bc1673eb78d5320c11a1516eef99385d3789b19772cd118b94ce4abf7e95487"}
Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.012112 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kc7j8"
Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.013491 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7n7fz\" (UniqueName: \"kubernetes.io/projected/60190c83-51d0-42d2-985b-6f790587622e-kube-api-access-7n7fz\") pod \"olm-operator-6b444d44fb-hg7t4\" (UID: \"60190c83-51d0-42d2-985b-6f790587622e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hg7t4"
Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.023956 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw2ht"
Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.038573 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsdgw\" (UniqueName: \"kubernetes.io/projected/fa9699fd-2f6b-460d-9ca7-95ebba8a5d72-kube-api-access-zsdgw\") pod \"catalog-operator-68c6474976-gv5dm\" (UID: \"fa9699fd-2f6b-460d-9ca7-95ebba8a5d72\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gv5dm"
Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.052620 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.052820 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jt8x\" (UniqueName: \"kubernetes.io/projected/47a9f45a-7c7f-41fb-b112-6e43666abe9b-kube-api-access-8jt8x\") pod \"dns-default-4dnjt\" (UID: \"47a9f45a-7c7f-41fb-b112-6e43666abe9b\") " pod="openshift-dns/dns-default-4dnjt"
Feb 02 22:36:37 crc kubenswrapper[4755]: E0202 22:36:37.052961 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:37.552948941 +0000 UTC m=+153.244169267 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.057409 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-dgrbx"
Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.068006 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-z7g7z"]
Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.069468 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hg7t4"
Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.078276 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbkkr\" (UniqueName: \"kubernetes.io/projected/56ca6971-8e8a-485d-886d-f5c8eadef3de-kube-api-access-cbkkr\") pod \"openshift-config-operator-7777fb866f-5hjgs\" (UID: \"56ca6971-8e8a-485d-886d-f5c8eadef3de\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5hjgs"
Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.084272 4755 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501190-785d5" Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.092400 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69tbt" Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.095268 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-5nv2v"] Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.096513 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-49pm9"] Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.101664 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-lwzgj" Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.101966 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dl6rv\" (UniqueName: \"kubernetes.io/projected/467ef27d-8f51-4317-80ee-9071d7024f86-kube-api-access-dl6rv\") pod \"marketplace-operator-79b997595-hnz5f\" (UID: \"467ef27d-8f51-4317-80ee-9071d7024f86\") " pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f" Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.113986 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f" Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.130086 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j" Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.134969 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4p9r\" (UniqueName: \"kubernetes.io/projected/ac249dca-286c-4d91-8782-2d3b0676dc68-kube-api-access-z4p9r\") pod \"machine-config-controller-84d6567774-4n2db\" (UID: \"ac249dca-286c-4d91-8782-2d3b0676dc68\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4n2db" Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.140725 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-vdz28" Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.152129 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-4dnjt" Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.153218 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:37 crc kubenswrapper[4755]: E0202 22:36:37.155110 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:37.655087072 +0000 UTC m=+153.346307398 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.160037 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbvtz\" (UniqueName: \"kubernetes.io/projected/f6b1859b-cd07-469f-be86-e691a81d5b85-kube-api-access-jbvtz\") pod \"control-plane-machine-set-operator-78cbb6b69f-h2gd9\" (UID: \"f6b1859b-cd07-469f-be86-e691a81d5b85\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h2gd9" Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.177760 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-455qd\" (UniqueName: \"kubernetes.io/projected/d61538e7-e186-4a77-a96a-9d4d9d514837-kube-api-access-455qd\") pod \"router-default-5444994796-2x78p\" (UID: \"d61538e7-e186-4a77-a96a-9d4d9d514837\") " pod="openshift-ingress/router-default-5444994796-2x78p" Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.191357 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-gn7kw" Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.255185 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:37 crc kubenswrapper[4755]: E0202 22:36:37.255587 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:37.755574785 +0000 UTC m=+153.446795111 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.286264 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h2gd9" Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.303798 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gv5dm" Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.324423 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-b5zvf"] Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.326531 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4n2db" Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.340542 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fzwbz"] Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.344021 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-s6v4j"] Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.345264 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lrgth"] Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.349514 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-2x78p" Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.355965 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:37 crc kubenswrapper[4755]: E0202 22:36:37.356363 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:37.856347866 +0000 UTC m=+153.547568182 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.382137 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5hjgs" Feb 02 22:36:37 crc kubenswrapper[4755]: W0202 22:36:37.420520 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc61f342b_cd14_408f_8c6e_e65cee1ebb39.slice/crio-b3cae06e8ac5b4e0a2d4c72f7aebf8004306b70901e2d0ad511fa8e66497f201 WatchSource:0}: Error finding container b3cae06e8ac5b4e0a2d4c72f7aebf8004306b70901e2d0ad511fa8e66497f201: Status 404 returned error can't find the container with id b3cae06e8ac5b4e0a2d4c72f7aebf8004306b70901e2d0ad511fa8e66497f201 Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.438568 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-sl4x8"] Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.438607 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nlddn"] Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.453242 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-ttsv6"] Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.453808 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qc97t"] Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.456981 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:37 crc kubenswrapper[4755]: E0202 22:36:37.457374 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:37.957359534 +0000 UTC m=+153.648579860 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.472302 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-76ppj"] Feb 02 22:36:37 crc kubenswrapper[4755]: W0202 22:36:37.510424 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1713b1df_6fc2_4060_91b9_e631ad9b335f.slice/crio-c9590d290d4e5f998ec2122733f834592ac9245ef42a49677a2a757f8ed02441 WatchSource:0}: Error finding container c9590d290d4e5f998ec2122733f834592ac9245ef42a49677a2a757f8ed02441: Status 404 returned error can't find the container with id c9590d290d4e5f998ec2122733f834592ac9245ef42a49677a2a757f8ed02441 Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.537511 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-lwzgj"] Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.558014 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dx54k"] Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.559334 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:37 crc kubenswrapper[4755]: E0202 22:36:37.562341 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:38.062287034 +0000 UTC m=+153.753507360 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.563439 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:37 crc kubenswrapper[4755]: E0202 22:36:37.564258 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-02-02 22:36:38.06424649 +0000 UTC m=+153.755466816 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.582698 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-lkd4r"] Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.608425 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-vdz28"] Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.647285 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kc7j8"] Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.665323 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:37 crc kubenswrapper[4755]: E0202 22:36:37.665449 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:38.165411233 +0000 UTC m=+153.856631559 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.666231 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:37 crc kubenswrapper[4755]: E0202 22:36:37.667747 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:38.167735859 +0000 UTC m=+153.858956185 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.700961 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-69tbt"] Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.705252 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw2ht"] Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.714719 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-dgrbx"] Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.769771 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:37 crc kubenswrapper[4755]: E0202 22:36:37.770830 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:38.270808197 +0000 UTC m=+153.962028523 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:37 crc kubenswrapper[4755]: W0202 22:36:37.822138 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod099069af_444c_4cc1_8c7f_786a4a64aedb.slice/crio-2245074d97e21962a7560425d390ee6ce9b8e061c3ed5a0912b42de22f74ba1b WatchSource:0}: Error finding container 2245074d97e21962a7560425d390ee6ce9b8e061c3ed5a0912b42de22f74ba1b: Status 404 returned error can't find the container with id 2245074d97e21962a7560425d390ee6ce9b8e061c3ed5a0912b42de22f74ba1b Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.871558 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:37 crc kubenswrapper[4755]: E0202 22:36:37.873314 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-02-02 22:36:38.373302037 +0000 UTC m=+154.064522353 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:37 crc kubenswrapper[4755]: I0202 22:36:37.972456 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:37 crc kubenswrapper[4755]: E0202 22:36:37.973999 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:38.473194383 +0000 UTC m=+154.164414709 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.050119 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hnz5f"] Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.051940 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hg7t4"] Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.065930 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j"] Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.074028 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:38 crc kubenswrapper[4755]: E0202 22:36:38.074313 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:38.574303574 +0000 UTC m=+154.265523900 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.088216 4755 generic.go:334] "Generic (PLEG): container finished" podID="19316443-6a89-4993-b196-1de2bece6e84" containerID="df0737bb5b918cb5b019f3cc97e1067cf03fb038cf47de43abe88ddd9f502971" exitCode=0 Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.088371 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97" event={"ID":"19316443-6a89-4993-b196-1de2bece6e84","Type":"ContainerDied","Data":"df0737bb5b918cb5b019f3cc97e1067cf03fb038cf47de43abe88ddd9f502971"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.130259 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-gn7kw"] Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.145830 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-7qkqs" podStartSLOduration=128.145798968 podStartE2EDuration="2m8.145798968s" podCreationTimestamp="2026-02-02 22:34:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:38.144413639 +0000 UTC m=+153.835633975" watchObservedRunningTime="2026-02-02 22:36:38.145798968 +0000 UTC m=+153.837019294" Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.153596 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bwhck" event={"ID":"21a3e3ee-831b-450a-b0da-13551b7353e4","Type":"ContainerStarted","Data":"a135165f770fecb7568bcf04f095d6c450e1d4bb02675cfd9a91198dec735570"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.175446 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:38 crc kubenswrapper[4755]: E0202 22:36:38.175879 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:38.675852418 +0000 UTC m=+154.367072744 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.176394 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:38 crc kubenswrapper[4755]: E0202 22:36:38.176632 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:38.67662138 +0000 UTC m=+154.367841706 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.197082 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-4dnjt"] Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.208110 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501190-785d5"] Feb 02 22:36:38 crc kubenswrapper[4755]: W0202 22:36:38.227083 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60190c83_51d0_42d2_985b_6f790587622e.slice/crio-63c188e627f6d9186a35a722aee3d17931d3458a2101ef1da528892cca206c51 WatchSource:0}: Error finding container 63c188e627f6d9186a35a722aee3d17931d3458a2101ef1da528892cca206c51: Status 404 returned error can't find the container with id 63c188e627f6d9186a35a722aee3d17931d3458a2101ef1da528892cca206c51 Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.234954 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-76ppj" event={"ID":"0a4a5bd4-3691-4bce-9266-3d3a05dae585","Type":"ContainerStarted","Data":"da867843d87b19fff60ed90e9b2362d13a25a3e3ce9e16cba90f845e13c5afdd"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.244368 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-dgrbx" event={"ID":"8f5a7aaf-90b0-4895-9d69-0b5df2ae76d1","Type":"ContainerStarted","Data":"3ddf1ce3e5821facf0e3f507b35bc90ab7209bbadb70014a0c4527ef02c9bc2a"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.256115 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" 
event={"ID":"1c3ea74e-377e-4d51-b82a-33e8ea14cf59","Type":"ContainerStarted","Data":"93a912a79c157286bfbb7b5db898b73c75421f8d028137dff9f49b103b0ba669"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.262896 4755 csr.go:261] certificate signing request csr-q4mq8 is approved, waiting to be issued Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.277312 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:38 crc kubenswrapper[4755]: E0202 22:36:38.278223 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:38.778206644 +0000 UTC m=+154.469426960 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.283934 4755 csr.go:257] certificate signing request csr-q4mq8 is issued Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.284489 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw2ht" event={"ID":"1b45fccf-2dc0-4fab-8a28-76127a690f13","Type":"ContainerStarted","Data":"290e0153350f751391d3411f4d96302abaafd090bda6650496fd5b8fa766635a"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.371037 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx" podStartSLOduration=127.371018118 podStartE2EDuration="2m7.371018118s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:38.369181845 +0000 UTC m=+154.060402181" watchObservedRunningTime="2026-02-02 22:36:38.371018118 +0000 UTC m=+154.062238444" Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.378135 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:38 crc kubenswrapper[4755]: E0202 22:36:38.378432 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:38.87842147 +0000 UTC m=+154.569641796 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.378938 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl4x8" event={"ID":"1713b1df-6fc2-4060-91b9-e631ad9b335f","Type":"ContainerStarted","Data":"c9590d290d4e5f998ec2122733f834592ac9245ef42a49677a2a757f8ed02441"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.390111 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dx54k" event={"ID":"21bfe782-2848-4171-8919-e1ce96150a09","Type":"ContainerStarted","Data":"e738068c37e8338e18d589143edc8d6be487e55b2c3d450be21d292d20d2ab9a"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.393632 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kc7j8" event={"ID":"0aec75ad-50ac-47c3-ad95-efd5b76ed561","Type":"ContainerStarted","Data":"118a866b35e763e482301b059838368d22699ebcefd7658cb782de2b13042ab5"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.405027 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-vdz28" event={"ID":"96042d4b-00db-43c7-9f94-203fa722a690","Type":"ContainerStarted","Data":"b4944c5123cb48ddefea404c406b9d73c0592b032ecafcfa0ff7e618b63503ef"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.425921 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-4n2db"] Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.431441 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-5hjgs"] Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.446478 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-n987n" event={"ID":"a6f21874-ee8f-4718-b2ab-8b4a97543364","Type":"ContainerStarted","Data":"5822479225fc39e5208520b955aef1a0dcf1de3cf208958e1d35e61bd27eed98"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.446939 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-n987n" Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.451518 4755 patch_prober.go:28] interesting pod/downloads-7954f5f757-n987n container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.451562 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-n987n" podUID="a6f21874-ee8f-4718-b2ab-8b4a97543364" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.451646 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" event={"ID":"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc","Type":"ContainerStarted","Data":"2942c6f2ea07561cf887ce34cad88d61a5739bd787ad4170964cf545a9e663f1"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.452003 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.452424 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gv5dm"] Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.454166 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nlddn" event={"ID":"b9deda20-3739-4585-a197-f51ce9a63b8c","Type":"ContainerStarted","Data":"ae51dc7e1ea3750869f421618b1590bf5861493e18377b36e506591d8fddb83d"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.455468 4755 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-5nv2v container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.18:6443/healthz\": dial tcp 10.217.0.18:6443: connect: connection refused" start-of-body= Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.456355 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" podUID="4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.18:6443/healthz\": dial tcp 10.217.0.18:6443: connect: connection refused" Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.457363 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-ttsv6" event={"ID":"82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4","Type":"ContainerStarted","Data":"dc0fc2809120a387adaf40fbd0c21995b4478b767dc71a681bdb4b05f2d0b671"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.457767 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h2gd9"] Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.458206 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-lkd4r" event={"ID":"439a2e89-139f-4356-ac5f-6325ac0c2a92","Type":"ContainerStarted","Data":"3189a43dfabee5e1a05583b93bccca6053ba8b96b583b303ba4843040d383fc6"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.460865 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-z7g7z" event={"ID":"938066f1-bde4-4bbc-ad80-47379f6a66ff","Type":"ContainerStarted","Data":"062fc556fa4becd194d900f41ead85f092d199376499c5ca5f354bf668fb0f7a"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.460907 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-z7g7z" event={"ID":"938066f1-bde4-4bbc-ad80-47379f6a66ff","Type":"ContainerStarted","Data":"00b70ccfa2eb5cef3a7012482c697f6147b19eea452544b15a3275a230a9fd9b"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.464293 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-2x78p" 
event={"ID":"d61538e7-e186-4a77-a96a-9d4d9d514837","Type":"ContainerStarted","Data":"27919f640e7c781e5c7ab775580cc7e41f4516e867933ede64e70c7459aea3d1"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.467681 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-lwzgj" event={"ID":"322e7205-53dc-4148-b16b-5989623d4cb6","Type":"ContainerStarted","Data":"b749f4c8a3ac75a4d7670af82df7758f9eda4f1add5ffe6f46e4221d0968f1b7"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.469322 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-s9mjc" event={"ID":"1812beb5-4070-4649-8b5b-e78e5257c3dc","Type":"ContainerStarted","Data":"1c7e6edb66c32970279e479025a62fcedf8a648fa9217b1db42297e62d3134fc"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.473909 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-b5zvf" event={"ID":"5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2","Type":"ContainerStarted","Data":"64bca2cafececf11547bcc56b8885a51805e96606ab24977bcac65562dafc5d8"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.474243 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-b5zvf" Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.479090 4755 patch_prober.go:28] interesting pod/console-operator-58897d9998-b5zvf container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.23:8443/readyz\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body= Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.479130 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-b5zvf" podUID="5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.23:8443/readyz\": dial tcp 10.217.0.23:8443: connect: connection refused" Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.483001 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:38 crc kubenswrapper[4755]: E0202 22:36:38.484304 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:38.984287856 +0000 UTC m=+154.675508172 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.498139 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-7qkqs" event={"ID":"4730daad-9b5f-4e27-a9d2-8a989d2c40e8","Type":"ContainerStarted","Data":"23dbd61a843879a9975aee348de90b63cfaafaf892df7d1637375464e81d1d6a"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.501550 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-49pm9" event={"ID":"7b8d098a-b26f-4de7-94a4-5ad6e2cd60d7","Type":"ContainerStarted","Data":"cc80ad0987a7eb73629c745739a6b53b2efb828fc92bc87de0d97f4c060442ca"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.503329 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qc97t" event={"ID":"79899b57-242d-45e0-8527-2af257b8a5b7","Type":"ContainerStarted","Data":"13842e35839ee419b964e91f1f004a3afe03837a38a05e4f7cd47d445c9c3832"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.505403 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-s6v4j" event={"ID":"c61f342b-cd14-408f-8c6e-e65cee1ebb39","Type":"ContainerStarted","Data":"3e244dd01d5c15c93c649b2a0785f07a12fecf236cdfa690bda975e062894a0a"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.505425 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-s6v4j" event={"ID":"c61f342b-cd14-408f-8c6e-e65cee1ebb39","Type":"ContainerStarted","Data":"b3cae06e8ac5b4e0a2d4c72f7aebf8004306b70901e2d0ad511fa8e66497f201"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.508918 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fh5rw" event={"ID":"49503265-4d0e-484f-8832-38be62c19af0","Type":"ContainerStarted","Data":"19d00a663d2730e92490835c80e1b2a75d2d56887208bcf2145cd96fa6f902d0"} Feb 02 22:36:38 crc kubenswrapper[4755]: W0202 22:36:38.514346 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podac249dca_286c_4d91_8782_2d3b0676dc68.slice/crio-f866a240eaffb5ac88540303fbca2553eeaa2c775fd47feed4ad6ea61d5a9b77 WatchSource:0}: Error finding container f866a240eaffb5ac88540303fbca2553eeaa2c775fd47feed4ad6ea61d5a9b77: Status 404 returned error can't find the container with id f866a240eaffb5ac88540303fbca2553eeaa2c775fd47feed4ad6ea61d5a9b77 Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.515456 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" event={"ID":"09e605cd-967a-462f-8fad-1cf16ef64351","Type":"ContainerStarted","Data":"13b3776ea94e86b139d8572241e9bdb06828725cae1683fa3970ebfd79a30e83"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.516182 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.518213 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fzwbz" event={"ID":"d98f0d35-15ef-4d83-9c0b-104f44f4ae41","Type":"ContainerStarted","Data":"8f7548d50970958eaf1704d9b77f59c6cd70b493630de3ebb1bec37ebdf9f1e4"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.519431 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69tbt" event={"ID":"099069af-444c-4cc1-8c7f-786a4a64aedb","Type":"ContainerStarted","Data":"2245074d97e21962a7560425d390ee6ce9b8e061c3ed5a0912b42de22f74ba1b"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.520223 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-6t4wf" event={"ID":"411c26b8-a425-41a4-b05c-3bdbd49f6e69","Type":"ContainerStarted","Data":"fa1f53a72453b0dbd61dea0a55bd07f8d2f133d0d98e1dc6c114d09955fe66bb"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.524332 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-rwshx" event={"ID":"0124b915-2ac4-4be7-b356-bf78a8295d9d","Type":"ContainerStarted","Data":"b054a350a4956321b512d82c2a40f0148c3cbe1b5a3613f3a6240087df8b4cca"} Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.524433 4755 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-lrgth container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body= Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.524463 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" podUID="09e605cd-967a-462f-8fad-1cf16ef64351" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused" Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.529650 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx" Feb 02 22:36:38 crc kubenswrapper[4755]: W0202 22:36:38.539832 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56ca6971_8e8a_485d_886d_f5c8eadef3de.slice/crio-f92c06db162f2b683da4b906a2428d118e1742312c7d4355dcd350b00e9e5126 WatchSource:0}: Error finding container f92c06db162f2b683da4b906a2428d118e1742312c7d4355dcd350b00e9e5126: Status 404 returned error can't find the container with id f92c06db162f2b683da4b906a2428d118e1742312c7d4355dcd350b00e9e5126 Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.585645 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:38 crc kubenswrapper[4755]: E0202 22:36:38.586009 4755 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:39.085993114 +0000 UTC m=+154.777213440 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.688801 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:38 crc kubenswrapper[4755]: E0202 22:36:38.688982 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:39.188965789 +0000 UTC m=+154.880186115 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.689410 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:38 crc kubenswrapper[4755]: E0202 22:36:38.691550 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:39.191533202 +0000 UTC m=+154.882753528 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.790773 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:38 crc kubenswrapper[4755]: E0202 22:36:38.790952 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:39.290930104 +0000 UTC m=+154.982150430 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.793901 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:38 crc kubenswrapper[4755]: E0202 22:36:38.794214 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:39.294202068 +0000 UTC m=+154.985422394 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:38 crc kubenswrapper[4755]: I0202 22:36:38.896209 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:38 crc kubenswrapper[4755]: E0202 22:36:38.896783 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:39.3967682 +0000 UTC m=+155.087988526 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:38.997381 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:39 crc kubenswrapper[4755]: E0202 22:36:38.997653 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:39.497642614 +0000 UTC m=+155.188862940 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.098351 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:39 crc kubenswrapper[4755]: E0202 22:36:39.098748 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:39.598732775 +0000 UTC m=+155.289953101 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.199813 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:39 crc kubenswrapper[4755]: E0202 22:36:39.200568 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:39.700552416 +0000 UTC m=+155.391772742 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.202421 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dx54k" podStartSLOduration=129.202401649 podStartE2EDuration="2m9.202401649s" podCreationTimestamp="2026-02-02 22:34:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:39.201252416 +0000 UTC m=+154.892472742" watchObservedRunningTime="2026-02-02 22:36:39.202401649 +0000 UTC m=+154.893621975" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.203418 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" podStartSLOduration=128.203409898 podStartE2EDuration="2m8.203409898s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:39.174065069 +0000 UTC m=+154.865285395" watchObservedRunningTime="2026-02-02 22:36:39.203409898 +0000 UTC m=+154.894630224" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.284868 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-02-02 22:31:38 +0000 UTC, rotation deadline is 2026-10-29 15:24:16.672164691 +0000 UTC Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.284914 4755 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6448h47m37.387252643s for next certificate rotation Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.295443 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" podStartSLOduration=129.295426059 podStartE2EDuration="2m9.295426059s" podCreationTimestamp="2026-02-02 22:34:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:39.256142236 +0000 UTC m=+154.947362572" watchObservedRunningTime="2026-02-02 22:36:39.295426059 +0000 UTC m=+154.986646385" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.296326 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fh5rw" podStartSLOduration=129.296321844 podStartE2EDuration="2m9.296321844s" podCreationTimestamp="2026-02-02 22:34:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:39.295663906 +0000 UTC m=+154.986884222" watchObservedRunningTime="2026-02-02 22:36:39.296321844 +0000 UTC m=+154.987542170" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.301912 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:39 crc kubenswrapper[4755]: E0202 22:36:39.302105 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:39.802052798 +0000 UTC m=+155.493273124 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.302294 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:39 crc kubenswrapper[4755]: E0202 22:36:39.302615 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:39.802608494 +0000 UTC m=+155.493828820 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.371105 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-b5zvf" podStartSLOduration=129.371071862 podStartE2EDuration="2m9.371071862s" podCreationTimestamp="2026-02-02 22:34:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:39.367300684 +0000 UTC m=+155.058521030" watchObservedRunningTime="2026-02-02 22:36:39.371071862 +0000 UTC m=+155.062292188" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.405220 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:39 crc kubenswrapper[4755]: E0202 22:36:39.406018 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:39.90599854 +0000 UTC m=+155.597218866 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.420219 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-z7g7z" podStartSLOduration=128.420198786 podStartE2EDuration="2m8.420198786s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:39.419850796 +0000 UTC m=+155.111071112" watchObservedRunningTime="2026-02-02 22:36:39.420198786 +0000 UTC m=+155.111419122" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.456287 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-6t4wf" podStartSLOduration=5.456266448 podStartE2EDuration="5.456266448s" podCreationTimestamp="2026-02-02 22:36:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:39.452178321 +0000 UTC m=+155.143398657" watchObservedRunningTime="2026-02-02 22:36:39.456266448 +0000 UTC m=+155.147486784" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.483423 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-bwhck" podStartSLOduration=129.483407384 podStartE2EDuration="2m9.483407384s" podCreationTimestamp="2026-02-02 22:34:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:39.482973681 +0000 UTC m=+155.174194007" watchObservedRunningTime="2026-02-02 22:36:39.483407384 +0000 UTC m=+155.174627710" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.509066 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:39 crc kubenswrapper[4755]: E0202 22:36:39.509470 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:40.009456968 +0000 UTC m=+155.700677294 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.545720 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-s6v4j" event={"ID":"c61f342b-cd14-408f-8c6e-e65cee1ebb39","Type":"ContainerStarted","Data":"713b91fba6ae3b1bc1cd01055cc9639060ae447ed754d25209b9e51d23dfb85e"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.546863 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-n987n" podStartSLOduration=129.546849667 podStartE2EDuration="2m9.546849667s" podCreationTimestamp="2026-02-02 22:34:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:39.545936071 +0000 UTC m=+155.237156397" watchObservedRunningTime="2026-02-02 22:36:39.546849667 +0000 UTC m=+155.238069993" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.557628 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h2gd9" event={"ID":"f6b1859b-cd07-469f-be86-e691a81d5b85","Type":"ContainerStarted","Data":"1a4def0fedd6a43d1fde44dbf0102249d9a37ae867cdba9b1f1ba716652d291e"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.557696 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h2gd9" event={"ID":"f6b1859b-cd07-469f-be86-e691a81d5b85","Type":"ContainerStarted","Data":"c6749ae4f8d5e5e215439f7290dcc9885bd3bad856b6ea0f7adba501ea4a2c7f"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.563557 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gv5dm" event={"ID":"fa9699fd-2f6b-460d-9ca7-95ebba8a5d72","Type":"ContainerStarted","Data":"43120b3d655b1f2553c21849a7e8fc5f76a7ab5f0abde7022741db294bd27e9d"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.563617 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gv5dm" event={"ID":"fa9699fd-2f6b-460d-9ca7-95ebba8a5d72","Type":"ContainerStarted","Data":"b67eed042bbe56960ff70f50b07fb34635177eae51021223c2649b89b4955c21"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.564085 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gv5dm" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.574083 4755 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-gv5dm container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.574152 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gv5dm" 
podUID="fa9699fd-2f6b-460d-9ca7-95ebba8a5d72" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.577269 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-vdz28" event={"ID":"96042d4b-00db-43c7-9f94-203fa722a690","Type":"ContainerStarted","Data":"9334cbf3f554a9b7efd491a50a6f7ec8aaacd5d1ea6b26bd38fa62597300760a"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.582650 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-rwshx" podStartSLOduration=129.582624989 podStartE2EDuration="2m9.582624989s" podCreationTimestamp="2026-02-02 22:34:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:39.578271255 +0000 UTC m=+155.269491581" watchObservedRunningTime="2026-02-02 22:36:39.582624989 +0000 UTC m=+155.273845325" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.599683 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-49pm9" event={"ID":"7b8d098a-b26f-4de7-94a4-5ad6e2cd60d7","Type":"ContainerStarted","Data":"ecbd8d12fc0bbd024db31fa00cb6a35e088b51f28df4197228cad3c2b953d808"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.604120 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-49pm9" podStartSLOduration=128.604101984 podStartE2EDuration="2m8.604101984s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:39.600669105 +0000 UTC m=+155.291889441" watchObservedRunningTime="2026-02-02 22:36:39.604101984 +0000 UTC m=+155.295322310" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.607079 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j" event={"ID":"ca9ec072-2c92-4631-8c7e-1d5b59f85076","Type":"ContainerStarted","Data":"ef60ecf29be54aa859d014932930e7f6fc9a2cb951bad59c1460bf2cb2ed1d20"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.607126 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j" event={"ID":"ca9ec072-2c92-4631-8c7e-1d5b59f85076","Type":"ContainerStarted","Data":"6716a61993d5c1651ee73747ec255507581591f8e4aa37e69870cc32a46901f8"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.607940 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.617046 4755 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-sjx8j container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.40:5443/healthz\": dial tcp 10.217.0.40:5443: connect: connection refused" start-of-body= Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.617102 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j" 
podUID="ca9ec072-2c92-4631-8c7e-1d5b59f85076" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.40:5443/healthz\": dial tcp 10.217.0.40:5443: connect: connection refused" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.617652 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:39 crc kubenswrapper[4755]: E0202 22:36:39.617834 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:40.117809855 +0000 UTC m=+155.809030181 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.617960 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:39 crc kubenswrapper[4755]: E0202 22:36:39.618240 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:40.118233058 +0000 UTC m=+155.809453384 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.621619 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-b5zvf" event={"ID":"5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2","Type":"ContainerStarted","Data":"3cdfcba18fbf1c86fd0527039dff196ef868704ff76816d37eaf107c40ec4226"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.622514 4755 patch_prober.go:28] interesting pod/console-operator-58897d9998-b5zvf container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.23:8443/readyz\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body= Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.622554 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-b5zvf" podUID="5cac64ea-ba01-46b0-becb-1fc1f7f1fbb2" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.23:8443/readyz\": dial tcp 10.217.0.23:8443: connect: connection refused" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.625887 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-gn7kw" event={"ID":"209646e7-0d11-4f45-9222-ca6ac4d92268","Type":"ContainerStarted","Data":"3674dd9b3d1a602ee51be108e34f21a357f326aa557e027411a3dea171f8168d"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.629483 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-s9mjc" event={"ID":"1812beb5-4070-4649-8b5b-e78e5257c3dc","Type":"ContainerStarted","Data":"81dba208e508079a8cbfb649805696ed05ba282f7a3d8dcb614a6a6cc96578d7"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.632596 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-dgrbx" event={"ID":"8f5a7aaf-90b0-4895-9d69-0b5df2ae76d1","Type":"ContainerStarted","Data":"b0e144f2407e31d71ad1eded4ee72d27097a2eb6721c8efd603904ba3b0ce712"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.634224 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f" event={"ID":"467ef27d-8f51-4317-80ee-9071d7024f86","Type":"ContainerStarted","Data":"3a7e23b649f364d0d3ee753c085c1a297d22374e6a1c7c434e47a67c95325cd8"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.634524 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f" event={"ID":"467ef27d-8f51-4317-80ee-9071d7024f86","Type":"ContainerStarted","Data":"f7648f6029f3757bd1dbb72c51d40345d84b104d001ec656ba1ee6ef7b1afd7c"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.634540 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.635980 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw2ht" event={"ID":"1b45fccf-2dc0-4fab-8a28-76127a690f13","Type":"ContainerStarted","Data":"728eb23ce727058531781b5aea1111521ef1df5ae65dc22cdb61044aecc06f4e"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.637172 4755 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-hnz5f container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" start-of-body= Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.637205 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f" podUID="467ef27d-8f51-4317-80ee-9071d7024f86" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.637845 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nlddn" event={"ID":"b9deda20-3739-4585-a197-f51ce9a63b8c","Type":"ContainerStarted","Data":"1a882bf7823eee1969f1d8f8db00b671c34d20ca5257b84515f0f93f16cfb731"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.654427 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-2x78p" event={"ID":"d61538e7-e186-4a77-a96a-9d4d9d514837","Type":"ContainerStarted","Data":"e82e5ba49b7340ac8fdb05dcbb70399e6a277925c9c126d95d793ab23fe40bd5"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.661041 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-lkd4r" event={"ID":"439a2e89-139f-4356-ac5f-6325ac0c2a92","Type":"ContainerStarted","Data":"cc31acb1dca0a0a8ef8efd412231a5ff14e2c6e631791c0ac7bdc97d691e0de7"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.668345 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" event={"ID":"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc","Type":"ContainerStarted","Data":"ced2195515b0330332aeab79cae8f611a29220e18873ac65413cfdbfb7d57abd"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.669589 4755 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-5nv2v container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.18:6443/healthz\": dial tcp 10.217.0.18:6443: connect: connection refused" start-of-body= Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.669617 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" podUID="4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.18:6443/healthz\": dial tcp 10.217.0.18:6443: connect: connection refused" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.675770 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4n2db" event={"ID":"ac249dca-286c-4d91-8782-2d3b0676dc68","Type":"ContainerStarted","Data":"f4a80a2876c8875012cfdd4e01850bcf49bebd7fc8f63a3b54e5edbd2b7ba41f"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.675812 4755 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4n2db" event={"ID":"ac249dca-286c-4d91-8782-2d3b0676dc68","Type":"ContainerStarted","Data":"f866a240eaffb5ac88540303fbca2553eeaa2c775fd47feed4ad6ea61d5a9b77"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.677380 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dx54k" event={"ID":"21bfe782-2848-4171-8919-e1ce96150a09","Type":"ContainerStarted","Data":"3bbbf88d35e38d7a7ed888dff6e21b4a0592b756062cb17772e91e6a0a75af54"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.687028 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qc97t" event={"ID":"79899b57-242d-45e0-8527-2af257b8a5b7","Type":"ContainerStarted","Data":"7dd769d7347a5c16a9cc5da346b343e1c9bbf37f536e05a71b1963d1a891a6bd"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.687347 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qc97t" event={"ID":"79899b57-242d-45e0-8527-2af257b8a5b7","Type":"ContainerStarted","Data":"d7f0a812f3e0337d1f71b561cd4a74a46f85c42d153cc64aacd7f9f8f2706f01"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.690753 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gv5dm" podStartSLOduration=128.690743741 podStartE2EDuration="2m8.690743741s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:39.688854267 +0000 UTC m=+155.380074603" watchObservedRunningTime="2026-02-02 22:36:39.690743741 +0000 UTC m=+155.381964067" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.692743 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl4x8" event={"ID":"1713b1df-6fc2-4060-91b9-e631ad9b335f","Type":"ContainerStarted","Data":"d5b2e73ed2b1f5b0c4f1a54b68b7a9c1a49402032d9110e306b3d400eac71d13"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.692792 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl4x8" event={"ID":"1713b1df-6fc2-4060-91b9-e631ad9b335f","Type":"ContainerStarted","Data":"b0c8b19d93c9e678e94683f8e7566f076f1c5137970c07935a26b6897b92fa4a"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.697695 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501190-785d5" event={"ID":"2d480358-faea-430f-97ad-c49f7878007b","Type":"ContainerStarted","Data":"7ccbfeee59ba0ed97fbce6a9cebca08514e7e81f082d93ca1693781c8081d75c"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.697875 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501190-785d5" event={"ID":"2d480358-faea-430f-97ad-c49f7878007b","Type":"ContainerStarted","Data":"a24e551c92d643fa9bedcb3f73f4ba5f9db4b924914601ab5ce78f6db0821f71"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.705845 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-4dnjt" 
event={"ID":"47a9f45a-7c7f-41fb-b112-6e43666abe9b","Type":"ContainerStarted","Data":"77f499c6abc3b81b449a59734ab69f6a2a66a9875a38fe11e6fd83ce63992269"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.705904 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-4dnjt" event={"ID":"47a9f45a-7c7f-41fb-b112-6e43666abe9b","Type":"ContainerStarted","Data":"29b9fab6a1809c9bc4c282342a662f4eabf492c7631e389295da900db95e4d28"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.718681 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:39 crc kubenswrapper[4755]: E0202 22:36:39.718847 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:40.218823384 +0000 UTC m=+155.910043710 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.719123 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:39 crc kubenswrapper[4755]: E0202 22:36:39.720539 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:40.220527772 +0000 UTC m=+155.911748098 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.728200 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-lwzgj" event={"ID":"322e7205-53dc-4148-b16b-5989623d4cb6","Type":"ContainerStarted","Data":"aa45ff10afd1b69d34f745567017f6c696d4bd140bffe1a05859c5bbe2538f2f"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.730458 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw2ht" podStartSLOduration=128.730436236 podStartE2EDuration="2m8.730436236s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:39.728319755 +0000 UTC m=+155.419540081" watchObservedRunningTime="2026-02-02 22:36:39.730436236 +0000 UTC m=+155.421656562" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.745311 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fzwbz" event={"ID":"d98f0d35-15ef-4d83-9c0b-104f44f4ae41","Type":"ContainerStarted","Data":"71a4e7ca70568395ec29a6df6ff56a29f4389378528178133740ec589ab8ffcb"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.757205 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-76ppj" event={"ID":"0a4a5bd4-3691-4bce-9266-3d3a05dae585","Type":"ContainerStarted","Data":"54665931dc498d13774f66afb1f0f1454da6271608b8d7b077a41d2d8dd94d89"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.783440 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" event={"ID":"1c3ea74e-377e-4d51-b82a-33e8ea14cf59","Type":"ContainerStarted","Data":"5b620903504eca39f8f9a34404e2debfc8f01cf7f98ec1ffd520cd4b216ced35"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.796347 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69tbt" event={"ID":"099069af-444c-4cc1-8c7f-786a4a64aedb","Type":"ContainerStarted","Data":"5706c273a010ee408a4007f81dbb7eea9b6793739a1554ed47f5987c807d0757"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.796417 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69tbt" event={"ID":"099069af-444c-4cc1-8c7f-786a4a64aedb","Type":"ContainerStarted","Data":"8e3a6a886c47bd2889a1b30beb6ccb57f3bceed7dcf0ad58b035b29c1cd10396"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.801968 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" event={"ID":"09e605cd-967a-462f-8fad-1cf16ef64351","Type":"ContainerStarted","Data":"bfe4c5c8941e89bdb4dc0a161815c6045571e3820de4985581df364d35a76229"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 
22:36:39.802630 4755 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-lrgth container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body= Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.802667 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" podUID="09e605cd-967a-462f-8fad-1cf16ef64351" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.803881 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nlddn" podStartSLOduration=128.803868685 podStartE2EDuration="2m8.803868685s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:39.777613905 +0000 UTC m=+155.468834241" watchObservedRunningTime="2026-02-02 22:36:39.803868685 +0000 UTC m=+155.495089011" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.810145 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97" event={"ID":"19316443-6a89-4993-b196-1de2bece6e84","Type":"ContainerStarted","Data":"e29960b63a2218a13cbd9ce88be072153d2aae90d2dff2c0d5a3df8b48644805"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.818808 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-z7g7z" event={"ID":"938066f1-bde4-4bbc-ad80-47379f6a66ff","Type":"ContainerStarted","Data":"866e30a796b5cce369e906cf4cd15aaf2bb3aa1aa75ea3cb172a95e0e2e1eff0"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.820118 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:39 crc kubenswrapper[4755]: E0202 22:36:39.820459 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:40.320437069 +0000 UTC m=+156.011657395 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.821178 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.824657 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5hjgs" event={"ID":"56ca6971-8e8a-485d-886d-f5c8eadef3de","Type":"ContainerStarted","Data":"87c536cd5f40de70b6b28045bb6f7686a07bbee1181c2d07aec212dbf3395adf"} Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.824691 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5hjgs" event={"ID":"56ca6971-8e8a-485d-886d-f5c8eadef3de","Type":"ContainerStarted","Data":"f92c06db162f2b683da4b906a2428d118e1742312c7d4355dcd350b00e9e5126"} Feb 02 22:36:39 crc kubenswrapper[4755]: E0202 22:36:39.825266 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:40.325253257 +0000 UTC m=+156.016473583 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.835601 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-ttsv6" event={"ID":"82d5835c-9cdf-4cf8-af7d-ae4abd9fb3b4","Type":"ContainerStarted","Data":"709e3e6abe198bd15afac67c7330932151ea61bdc18634f67e817751d4bd0d7a"}
Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.862035 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kc7j8" event={"ID":"0aec75ad-50ac-47c3-ad95-efd5b76ed561","Type":"ContainerStarted","Data":"b0f598646154629415402d4bc85bfe28675f31c3a83caf93672987753dce7415"}
Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.862082 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kc7j8" event={"ID":"0aec75ad-50ac-47c3-ad95-efd5b76ed561","Type":"ContainerStarted","Data":"83074f149e4f0602b08fc789a790ba66d5d23b7004a3b81248b7f84631765fb1"}
Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.862759 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kc7j8"
Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.863402 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j" podStartSLOduration=128.863392417 podStartE2EDuration="2m8.863392417s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:39.863047177 +0000 UTC m=+155.554267513" watchObservedRunningTime="2026-02-02 22:36:39.863392417 +0000 UTC m=+155.554612743"
Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.864504 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-s9mjc" podStartSLOduration=128.864498719 podStartE2EDuration="2m8.864498719s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:39.810370101 +0000 UTC m=+155.501590457" watchObservedRunningTime="2026-02-02 22:36:39.864498719 +0000 UTC m=+155.555719045"
Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.881058 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hg7t4" event={"ID":"60190c83-51d0-42d2-985b-6f790587622e","Type":"ContainerStarted","Data":"4ba64913c3cf37689fc80850a96e873ef94be03bbd55f5ecc012ff6f6c8c16cf"}
Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.881097 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hg7t4" event={"ID":"60190c83-51d0-42d2-985b-6f790587622e","Type":"ContainerStarted","Data":"63c188e627f6d9186a35a722aee3d17931d3458a2101ef1da528892cca206c51"}
Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.881112 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hg7t4"
Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.881820 4755 patch_prober.go:28] interesting pod/downloads-7954f5f757-n987n container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body=
Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.881866 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-n987n" podUID="a6f21874-ee8f-4718-b2ab-8b4a97543364" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused"
Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.899666 4755 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-hg7t4 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body=
Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.899739 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hg7t4" podUID="60190c83-51d0-42d2-985b-6f790587622e" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused"
Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.928255 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:39 crc kubenswrapper[4755]: E0202 22:36:39.928718 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:40.428687494 +0000 UTC m=+156.119907820 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.928971 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:39 crc kubenswrapper[4755]: E0202 22:36:39.932000 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:40.431957048 +0000 UTC m=+156.123177454 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.945227 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f" podStartSLOduration=128.945210546 podStartE2EDuration="2m8.945210546s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:39.931858825 +0000 UTC m=+155.623079161" watchObservedRunningTime="2026-02-02 22:36:39.945210546 +0000 UTC m=+155.636430872"
Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.947912 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-dgrbx" podStartSLOduration=128.947899023 podStartE2EDuration="2m8.947899023s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:39.893963021 +0000 UTC m=+155.585183347" watchObservedRunningTime="2026-02-02 22:36:39.947899023 +0000 UTC m=+155.639119349"
Feb 02 22:36:39 crc kubenswrapper[4755]: I0202 22:36:39.992268 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-vdz28" podStartSLOduration=6.992249211 podStartE2EDuration="6.992249211s" podCreationTimestamp="2026-02-02 22:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:39.992235051 +0000 UTC m=+155.683455377" watchObservedRunningTime="2026-02-02 22:36:39.992249211 +0000 UTC m=+155.683469537"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.031208 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.033715 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-s6v4j" podStartSLOduration=129.033702267 podStartE2EDuration="2m9.033702267s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:40.032830602 +0000 UTC m=+155.724050928" watchObservedRunningTime="2026-02-02 22:36:40.033702267 +0000 UTC m=+155.724922593"
Feb 02 22:36:40 crc kubenswrapper[4755]: E0202 22:36:40.034355 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:40.534341525 +0000 UTC m=+156.225561851 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.055453 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-h2gd9" podStartSLOduration=129.055435838 podStartE2EDuration="2m9.055435838s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:40.055070948 +0000 UTC m=+155.746291274" watchObservedRunningTime="2026-02-02 22:36:40.055435838 +0000 UTC m=+155.746656164"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.133834 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:40 crc kubenswrapper[4755]: E0202 22:36:40.134101 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:40.634088737 +0000 UTC m=+156.325309063 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.140024 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29501190-785d5" podStartSLOduration=130.140009276 podStartE2EDuration="2m10.140009276s" podCreationTimestamp="2026-02-02 22:34:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:40.102941486 +0000 UTC m=+155.794161812" watchObservedRunningTime="2026-02-02 22:36:40.140009276 +0000 UTC m=+155.831229592"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.141280 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-ttsv6" podStartSLOduration=129.141272812 podStartE2EDuration="2m9.141272812s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:40.139674167 +0000 UTC m=+155.830894493" watchObservedRunningTime="2026-02-02 22:36:40.141272812 +0000 UTC m=+155.832493138"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.234723 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:40 crc kubenswrapper[4755]: E0202 22:36:40.234934 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:40.73490735 +0000 UTC m=+156.426127676 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.256803 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kc7j8" podStartSLOduration=129.256785465 podStartE2EDuration="2m9.256785465s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:40.207172427 +0000 UTC m=+155.898392763" watchObservedRunningTime="2026-02-02 22:36:40.256785465 +0000 UTC m=+155.948005791"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.287200 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sl4x8" podStartSLOduration=129.287180524 podStartE2EDuration="2m9.287180524s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:40.257606389 +0000 UTC m=+155.948826715" watchObservedRunningTime="2026-02-02 22:36:40.287180524 +0000 UTC m=+155.978400850"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.331086 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69tbt" podStartSLOduration=129.331068699 podStartE2EDuration="2m9.331068699s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:40.287817282 +0000 UTC m=+155.979037608" watchObservedRunningTime="2026-02-02 22:36:40.331068699 +0000 UTC m=+156.022289025"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.335782 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:40 crc kubenswrapper[4755]: E0202 22:36:40.336305 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:40.836294259 +0000 UTC m=+156.527514585 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.350793 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-2x78p"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.352344 4755 patch_prober.go:28] interesting pod/router-default-5444994796-2x78p container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body=
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.352675 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2x78p" podUID="d61538e7-e186-4a77-a96a-9d4d9d514837" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.372450 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" podStartSLOduration=130.372429992 podStartE2EDuration="2m10.372429992s" podCreationTimestamp="2026-02-02 22:34:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:40.334086535 +0000 UTC m=+156.025306861" watchObservedRunningTime="2026-02-02 22:36:40.372429992 +0000 UTC m=+156.063650318"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.406543 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-qc97t" podStartSLOduration=130.406524727 podStartE2EDuration="2m10.406524727s" podCreationTimestamp="2026-02-02 22:34:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:40.371688451 +0000 UTC m=+156.062908787" watchObservedRunningTime="2026-02-02 22:36:40.406524727 +0000 UTC m=+156.097745053"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.407590 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fzwbz" podStartSLOduration=129.407583187 podStartE2EDuration="2m9.407583187s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:40.405115346 +0000 UTC m=+156.096335682" watchObservedRunningTime="2026-02-02 22:36:40.407583187 +0000 UTC m=+156.098803513"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.437326 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:40 crc kubenswrapper[4755]: E0202 22:36:40.437563 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:40.937534023 +0000 UTC m=+156.628754359 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.437937 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:40 crc kubenswrapper[4755]: E0202 22:36:40.438368 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:40.938348286 +0000 UTC m=+156.629568662 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.485155 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-lwzgj" podStartSLOduration=129.485135644 podStartE2EDuration="2m9.485135644s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:40.482935211 +0000 UTC m=+156.174155557" watchObservedRunningTime="2026-02-02 22:36:40.485135644 +0000 UTC m=+156.176355970"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.486438 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hg7t4" podStartSLOduration=129.486428651 podStartE2EDuration="2m9.486428651s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:40.445026347 +0000 UTC m=+156.136246683" watchObservedRunningTime="2026-02-02 22:36:40.486428651 +0000 UTC m=+156.177648977"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.536652 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97" podStartSLOduration=129.536633377 podStartE2EDuration="2m9.536633377s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:40.534807174 +0000 UTC m=+156.226027530" watchObservedRunningTime="2026-02-02 22:36:40.536633377 +0000 UTC m=+156.227853713"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.540126 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:40 crc kubenswrapper[4755]: E0202 22:36:40.540490 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:41.040472796 +0000 UTC m=+156.731693122 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.571941 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-76ppj" podStartSLOduration=129.571922836 podStartE2EDuration="2m9.571922836s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:40.568909079 +0000 UTC m=+156.260129415" watchObservedRunningTime="2026-02-02 22:36:40.571922836 +0000 UTC m=+156.263143162"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.614274 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-2x78p" podStartSLOduration=129.614256216 podStartE2EDuration="2m9.614256216s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:40.612084604 +0000 UTC m=+156.303304960" watchObservedRunningTime="2026-02-02 22:36:40.614256216 +0000 UTC m=+156.305476542"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.641779 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:40 crc kubenswrapper[4755]: E0202 22:36:40.642179 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:41.142165754 +0000 UTC m=+156.833386080 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.742937 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:40 crc kubenswrapper[4755]: E0202 22:36:40.743524 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:41.243504192 +0000 UTC m=+156.934724528 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.743977 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:40 crc kubenswrapper[4755]: E0202 22:36:40.744339 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:41.244329315 +0000 UTC m=+156.935549651 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.845441 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:40 crc kubenswrapper[4755]: E0202 22:36:40.846769 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:41.346751124 +0000 UTC m=+157.037971450 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.887540 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-lkd4r" event={"ID":"439a2e89-139f-4356-ac5f-6325ac0c2a92","Type":"ContainerStarted","Data":"50b7aa664eb504da2638e5b91c8477813cf4561b06862dbc263c7ae2a60df0b6"}
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.890174 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4n2db" event={"ID":"ac249dca-286c-4d91-8782-2d3b0676dc68","Type":"ContainerStarted","Data":"f503f8f46980d9b56c2baccb05d25f2eec3d2348d2c9f2aad3db3ef3fb1a3fe0"}
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.892325 4755 generic.go:334] "Generic (PLEG): container finished" podID="56ca6971-8e8a-485d-886d-f5c8eadef3de" containerID="87c536cd5f40de70b6b28045bb6f7686a07bbee1181c2d07aec212dbf3395adf" exitCode=0
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.892474 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5hjgs" event={"ID":"56ca6971-8e8a-485d-886d-f5c8eadef3de","Type":"ContainerDied","Data":"87c536cd5f40de70b6b28045bb6f7686a07bbee1181c2d07aec212dbf3395adf"}
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.892549 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5hjgs"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.892561 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5hjgs" event={"ID":"56ca6971-8e8a-485d-886d-f5c8eadef3de","Type":"ContainerStarted","Data":"9d3c4aefb8849f8acc59ce5957fe216a1a718da7cce4dcc44eb728070f7ac2cf"}
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.894615 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-4dnjt" event={"ID":"47a9f45a-7c7f-41fb-b112-6e43666abe9b","Type":"ContainerStarted","Data":"8f9bc9bc1ac0839f9bf4d5c5d859a91cd446c67e0b6100563a057b7b9d121655"}
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.894761 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-4dnjt"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.896285 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-gn7kw" event={"ID":"209646e7-0d11-4f45-9222-ca6ac4d92268","Type":"ContainerStarted","Data":"d265bba44f17296cf8928b9fdf17338c43b045b00d3228c2e94c7f28ec45fc5e"}
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.896950 4755 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-gv5dm container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body=
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.897004 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gv5dm" podUID="fa9699fd-2f6b-460d-9ca7-95ebba8a5d72" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.897216 4755 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-hnz5f container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" start-of-body=
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.897257 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f" podUID="467ef27d-8f51-4317-80ee-9071d7024f86" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.897550 4755 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-lrgth container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body=
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.897572 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" podUID="09e605cd-967a-462f-8fad-1cf16ef64351" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.897770 4755 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-hg7t4 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body=
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.897804 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hg7t4" podUID="60190c83-51d0-42d2-985b-6f790587622e" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.897969 4755 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-sjx8j container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.40:5443/healthz\": dial tcp 10.217.0.40:5443: connect: connection refused" start-of-body=
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.898042 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j" podUID="ca9ec072-2c92-4631-8c7e-1d5b59f85076" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.40:5443/healthz\": dial tcp 10.217.0.40:5443: connect: connection refused"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.912984 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-lkd4r" podStartSLOduration=129.912966777 podStartE2EDuration="2m9.912966777s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:40.911053592 +0000 UTC m=+156.602273938" watchObservedRunningTime="2026-02-02 22:36:40.912966777 +0000 UTC m=+156.604187103"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.947320 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.947590 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4n2db" podStartSLOduration=129.947568556 podStartE2EDuration="2m9.947568556s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:40.94699364 +0000 UTC m=+156.638213986" watchObservedRunningTime="2026-02-02 22:36:40.947568556 +0000 UTC m=+156.638788882"
Feb 02 22:36:40 crc kubenswrapper[4755]: E0202 22:36:40.963372 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:41.463351777 +0000 UTC m=+157.154572103 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:40 crc kubenswrapper[4755]: I0202 22:36:40.990002 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5hjgs" podStartSLOduration=130.989987189 podStartE2EDuration="2m10.989987189s" podCreationTimestamp="2026-02-02 22:34:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:40.988433995 +0000 UTC m=+156.679654331" watchObservedRunningTime="2026-02-02 22:36:40.989987189 +0000 UTC m=+156.681207515"
Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.031283 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-vcjbc"
Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.031585 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-vcjbc"
Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.048518 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:41 crc kubenswrapper[4755]: E0202 22:36:41.051054 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:41.551037635 +0000 UTC m=+157.242257961 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.080035 4755 patch_prober.go:28] interesting pod/apiserver-7bbb656c7d-7zl97 container/oauth-apiserver namespace/openshift-oauth-apiserver: Startup probe status=failure output="Get \"https://10.217.0.7:8443/livez\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body=
Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.080084 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97" podUID="19316443-6a89-4993-b196-1de2bece6e84" containerName="oauth-apiserver" probeResult="failure" output="Get \"https://10.217.0.7:8443/livez\": dial tcp 10.217.0.7:8443: connect: connection refused"
Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.081413 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97"
Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.081443 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97"
Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.151493 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:41 crc kubenswrapper[4755]: E0202 22:36:41.151937 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:41.651919899 +0000 UTC m=+157.343140225 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.239702 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v"
Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.252426 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:41 crc kubenswrapper[4755]: E0202 22:36:41.252761 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:41.752714011 +0000 UTC m=+157.443934337 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.263435 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-4dnjt" podStartSLOduration=7.263415927 podStartE2EDuration="7.263415927s" podCreationTimestamp="2026-02-02 22:36:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:41.009396874 +0000 UTC m=+156.700617200" watchObservedRunningTime="2026-02-02 22:36:41.263415927 +0000 UTC m=+156.954636253"
Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.354069 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:41 crc kubenswrapper[4755]: E0202 22:36:41.354442 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:41.854425449 +0000 UTC m=+157.545645775 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.361111 4755 patch_prober.go:28] interesting pod/router-default-5444994796-2x78p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Feb 02 22:36:41 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld
Feb 02 22:36:41 crc kubenswrapper[4755]: [+]process-running ok
Feb 02 22:36:41 crc kubenswrapper[4755]: healthz check failed
Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.361175 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2x78p" podUID="d61538e7-e186-4a77-a96a-9d4d9d514837" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.455593 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:41 crc kubenswrapper[4755]: E0202 22:36:41.455695 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:41.955680104 +0000 UTC m=+157.646900430 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.455886 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:41 crc kubenswrapper[4755]: E0202 22:36:41.456182 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:41.956175098 +0000 UTC m=+157.647395424 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.557507 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:41 crc kubenswrapper[4755]: E0202 22:36:41.557944 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:42.057914037 +0000 UTC m=+157.749134363 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.658930 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:41 crc kubenswrapper[4755]: E0202 22:36:41.659313 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:42.159297466 +0000 UTC m=+157.850517792 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.729536 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-b5zvf"
Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.760440 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:41 crc kubenswrapper[4755]: E0202 22:36:41.760641 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:42.260613843 +0000 UTC m=+157.951834169 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.760794 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:41 crc kubenswrapper[4755]: E0202 22:36:41.761047 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:42.261035235 +0000 UTC m=+157.952255551 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.861301 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:41 crc kubenswrapper[4755]: E0202 22:36:41.861606 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:42.36159194 +0000 UTC m=+158.052812266 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.901298 4755 generic.go:334] "Generic (PLEG): container finished" podID="2d480358-faea-430f-97ad-c49f7878007b" containerID="7ccbfeee59ba0ed97fbce6a9cebca08514e7e81f082d93ca1693781c8081d75c" exitCode=0 Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.901907 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501190-785d5" event={"ID":"2d480358-faea-430f-97ad-c49f7878007b","Type":"ContainerDied","Data":"7ccbfeee59ba0ed97fbce6a9cebca08514e7e81f082d93ca1693781c8081d75c"} Feb 02 22:36:41 crc kubenswrapper[4755]: I0202 22:36:41.962739 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:41 crc kubenswrapper[4755]: E0202 22:36:41.963067 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:42.463049781 +0000 UTC m=+158.154270107 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.063901 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:42 crc kubenswrapper[4755]: E0202 22:36:42.064093 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:42.564067319 +0000 UTC m=+158.255287645 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.064341 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:42 crc kubenswrapper[4755]: E0202 22:36:42.065713 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:42.565697766 +0000 UTC m=+158.256918082 (durationBeforeRetry 500ms). 
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.158882 4755 patch_prober.go:28] interesting pod/apiserver-76f77b778f-vcjbc container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Feb 02 22:36:42 crc kubenswrapper[4755]: [+]log ok
Feb 02 22:36:42 crc kubenswrapper[4755]: [+]etcd ok
Feb 02 22:36:42 crc kubenswrapper[4755]: [+]poststarthook/start-apiserver-admission-initializer ok
Feb 02 22:36:42 crc kubenswrapper[4755]: [+]poststarthook/generic-apiserver-start-informers ok
Feb 02 22:36:42 crc kubenswrapper[4755]: [+]poststarthook/max-in-flight-filter ok
Feb 02 22:36:42 crc kubenswrapper[4755]: [+]poststarthook/storage-object-count-tracker-hook ok
Feb 02 22:36:42 crc kubenswrapper[4755]: [+]poststarthook/image.openshift.io-apiserver-caches ok
Feb 02 22:36:42 crc kubenswrapper[4755]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld
Feb 02 22:36:42 crc kubenswrapper[4755]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld
Feb 02 22:36:42 crc kubenswrapper[4755]: [+]poststarthook/project.openshift.io-projectcache ok
Feb 02 22:36:42 crc kubenswrapper[4755]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok
Feb 02 22:36:42 crc kubenswrapper[4755]: [-]poststarthook/openshift.io-startinformers failed: reason withheld
Feb 02 22:36:42 crc kubenswrapper[4755]: [+]poststarthook/openshift.io-restmapperupdater ok
Feb 02 22:36:42 crc kubenswrapper[4755]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Feb 02 22:36:42 crc kubenswrapper[4755]: livez check failed
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.158950 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" podUID="1c3ea74e-377e-4d51-b82a-33e8ea14cf59" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.166612 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:42 crc kubenswrapper[4755]: E0202 22:36:42.166741 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:42.666699914 +0000 UTC m=+158.357920240 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
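Each [+]/[-] line above is one aggregated health check echoed in the probed endpoint's response body; any [-] entry makes the endpoint answer HTTP 500, which prober.go records as a startup-probe failure. A minimal Go sketch of such an aggregated handler follows, assuming a hypothetical /livez route and check set (illustrative only, not the openshift-apiserver implementation):

package main

import (
	"fmt"
	"log"
	"net/http"
)

// checks maps a health-check name to its current status, echoing the
// [+]name ok / [-]name failed lines in the probe output above.
var checks = map[string]bool{
	"ping": true,
	"etcd": true,
	"poststarthook/authorization.openshift.io-bootstrapclusterroles": false,
}

func livez(w http.ResponseWriter, r *http.Request) {
	failed := false
	body := ""
	for name, ok := range checks {
		if ok {
			body += fmt.Sprintf("[+]%s ok\n", name)
		} else {
			body += fmt.Sprintf("[-]%s failed: reason withheld\n", name)
			failed = true
		}
	}
	if failed {
		body += "livez check failed\n"
		w.WriteHeader(http.StatusInternalServerError) // kubelet then logs "statuscode: 500"
	}
	fmt.Fprint(w, body)
}

func main() {
	http.HandleFunc("/livez", livez)
	log.Fatal(http.ListenAndServe(":8080", nil))
}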
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.166791 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:42 crc kubenswrapper[4755]: E0202 22:36:42.167075 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:42.667062764 +0000 UTC m=+158.358283090 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.267832 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:42 crc kubenswrapper[4755]: E0202 22:36:42.268027 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:42.76800248 +0000 UTC m=+158.459222806 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.268083 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:42 crc kubenswrapper[4755]: E0202 22:36:42.268554 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:42.768547186 +0000 UTC m=+158.459767512 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.353403 4755 patch_prober.go:28] interesting pod/router-default-5444994796-2x78p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Feb 02 22:36:42 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld
Feb 02 22:36:42 crc kubenswrapper[4755]: [+]process-running ok
Feb 02 22:36:42 crc kubenswrapper[4755]: healthz check failed
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.353468 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2x78p" podUID="d61538e7-e186-4a77-a96a-9d4d9d514837" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.369082 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:42 crc kubenswrapper[4755]: E0202 22:36:42.369262 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:42.869229755 +0000 UTC m=+158.560450081 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.369359 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:42 crc kubenswrapper[4755]: E0202 22:36:42.370116 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:42.870091899 +0000 UTC m=+158.561312225 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.470707 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:42 crc kubenswrapper[4755]: E0202 22:36:42.470870 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:42.97084218 +0000 UTC m=+158.662062506 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.471353 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:42 crc kubenswrapper[4755]: E0202 22:36:42.471720 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:42.971712495 +0000 UTC m=+158.662932821 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.572148 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:42 crc kubenswrapper[4755]: E0202 22:36:42.572315 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:43.07228796 +0000 UTC m=+158.763508286 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.572425 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:42 crc kubenswrapper[4755]: E0202 22:36:42.572789 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:43.072776544 +0000 UTC m=+158.763996870 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.673631 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:42 crc kubenswrapper[4755]: E0202 22:36:42.673816 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:43.173787523 +0000 UTC m=+158.865007849 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.673956 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:42 crc kubenswrapper[4755]: E0202 22:36:42.674270 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:43.174257236 +0000 UTC m=+158.865477562 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.774615 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:42 crc kubenswrapper[4755]: E0202 22:36:42.774915 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:43.274879863 +0000 UTC m=+158.966100229 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.876175 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:42 crc kubenswrapper[4755]: E0202 22:36:42.876543 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:43.376526479 +0000 UTC m=+159.067746895 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.902385 4755 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-sjx8j container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.40:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.902465 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j" podUID="ca9ec072-2c92-4631-8c7e-1d5b59f85076" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.40:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.908249 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-gn7kw" event={"ID":"209646e7-0d11-4f45-9222-ca6ac4d92268","Type":"ContainerStarted","Data":"a8a393b15ec2a8d7bdbc6ee5d9893d60bea4cc411ac308b85fde81ce68ca5e60"}
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.908307 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-gn7kw" event={"ID":"209646e7-0d11-4f45-9222-ca6ac4d92268","Type":"ContainerStarted","Data":"dec9fbb8bdbfd510582d72d98fe5d5c3e35cf8d8bde17d0ce831cf23529dde70"}
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.935053 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-964gh"]
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.935980 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-964gh"
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.939674 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.964640 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-964gh"]
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.977457 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:42 crc kubenswrapper[4755]: E0202 22:36:42.977756 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:43.477707152 +0000 UTC m=+159.168927488 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:42 crc kubenswrapper[4755]: I0202 22:36:42.977903 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:42 crc kubenswrapper[4755]: E0202 22:36:42.978261 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:43.478248528 +0000 UTC m=+159.169468924 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
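The packageserver readiness failure above is a client-side timeout: Go's net/http produces exactly the "(Client.Timeout exceeded while awaiting headers)" wording when no response headers arrive within http.Client.Timeout. A minimal sketch of that failure mode, reusing the endpoint URL from the log (illustrative only; the 1-second timeout is a hypothetical probe threshold):

package main

import (
	"fmt"
	"net/http"
	"time"
)

func main() {
	// A probe uses a bounded client; if headers do not arrive in time the
	// returned error carries "(Client.Timeout exceeded while awaiting headers)".
	client := &http.Client{Timeout: 1 * time.Second}
	resp, err := client.Get("https://10.217.0.40:5443/healthz") // endpoint taken from the log entry above
	if err != nil {
		fmt.Println("probe failed:", err) // readiness stays false; the pod is kept out of service endpoints
		return
	}
	defer resp.Body.Close()
	fmt.Println("probe status:", resp.StatusCode)
}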
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.079133 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:43 crc kubenswrapper[4755]: E0202 22:36:43.079420 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:43.57939417 +0000 UTC m=+159.270614496 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.079602 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpn7j\" (UniqueName: \"kubernetes.io/projected/83ede4e1-292f-40c0-8e1f-cc44190a0c92-kube-api-access-qpn7j\") pod \"community-operators-964gh\" (UID: \"83ede4e1-292f-40c0-8e1f-cc44190a0c92\") " pod="openshift-marketplace/community-operators-964gh"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.079662 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.079699 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83ede4e1-292f-40c0-8e1f-cc44190a0c92-catalog-content\") pod \"community-operators-964gh\" (UID: \"83ede4e1-292f-40c0-8e1f-cc44190a0c92\") " pod="openshift-marketplace/community-operators-964gh"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.079716 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83ede4e1-292f-40c0-8e1f-cc44190a0c92-utilities\") pod \"community-operators-964gh\" (UID: \"83ede4e1-292f-40c0-8e1f-cc44190a0c92\") " pod="openshift-marketplace/community-operators-964gh"
Feb 02 22:36:43 crc kubenswrapper[4755]: E0202 22:36:43.080069 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:43.580058009 +0000 UTC m=+159.271278335 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.125375 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9k7fx"]
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.129331 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9k7fx"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.133566 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.137368 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9k7fx"]
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.180060 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.180312 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpn7j\" (UniqueName: \"kubernetes.io/projected/83ede4e1-292f-40c0-8e1f-cc44190a0c92-kube-api-access-qpn7j\") pod \"community-operators-964gh\" (UID: \"83ede4e1-292f-40c0-8e1f-cc44190a0c92\") " pod="openshift-marketplace/community-operators-964gh"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.180378 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83ede4e1-292f-40c0-8e1f-cc44190a0c92-utilities\") pod \"community-operators-964gh\" (UID: \"83ede4e1-292f-40c0-8e1f-cc44190a0c92\") " pod="openshift-marketplace/community-operators-964gh"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.180394 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83ede4e1-292f-40c0-8e1f-cc44190a0c92-catalog-content\") pod \"community-operators-964gh\" (UID: \"83ede4e1-292f-40c0-8e1f-cc44190a0c92\") " pod="openshift-marketplace/community-operators-964gh"
Feb 02 22:36:43 crc kubenswrapper[4755]: E0202 22:36:43.180446 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:43.680417957 +0000 UTC m=+159.371638283 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.180831 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83ede4e1-292f-40c0-8e1f-cc44190a0c92-catalog-content\") pod \"community-operators-964gh\" (UID: \"83ede4e1-292f-40c0-8e1f-cc44190a0c92\") " pod="openshift-marketplace/community-operators-964gh"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.181053 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83ede4e1-292f-40c0-8e1f-cc44190a0c92-utilities\") pod \"community-operators-964gh\" (UID: \"83ede4e1-292f-40c0-8e1f-cc44190a0c92\") " pod="openshift-marketplace/community-operators-964gh"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.213903 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpn7j\" (UniqueName: \"kubernetes.io/projected/83ede4e1-292f-40c0-8e1f-cc44190a0c92-kube-api-access-qpn7j\") pod \"community-operators-964gh\" (UID: \"83ede4e1-292f-40c0-8e1f-cc44190a0c92\") " pod="openshift-marketplace/community-operators-964gh"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.242932 4755 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.248110 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-964gh"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.281618 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.282037 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2025f58f-a8e1-4009-a95b-946aca049871-catalog-content\") pod \"certified-operators-9k7fx\" (UID: \"2025f58f-a8e1-4009-a95b-946aca049871\") " pod="openshift-marketplace/certified-operators-9k7fx"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.282095 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wk4p\" (UniqueName: \"kubernetes.io/projected/2025f58f-a8e1-4009-a95b-946aca049871-kube-api-access-2wk4p\") pod \"certified-operators-9k7fx\" (UID: \"2025f58f-a8e1-4009-a95b-946aca049871\") " pod="openshift-marketplace/certified-operators-9k7fx"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.282118 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2025f58f-a8e1-4009-a95b-946aca049871-utilities\") pod \"certified-operators-9k7fx\" (UID: \"2025f58f-a8e1-4009-a95b-946aca049871\") " pod="openshift-marketplace/certified-operators-9k7fx"
Feb 02 22:36:43 crc kubenswrapper[4755]: E0202 22:36:43.282476 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:43.782461175 +0000 UTC m=+159.473681511 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.312050 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501190-785d5"
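The plugin_watcher entry above is the turning point for the failing PVC operations: kubelet has now observed kubevirt.io.hostpath-provisioner-reg.sock under /var/lib/kubelet/plugins_registry, so the driver can complete registration and subsequent mount retries can succeed. kubelet discovers these sockets by watching that directory for filesystem events; a minimal Go sketch of the same pattern follows, assuming github.com/fsnotify/fsnotify (illustrative only, not kubelet's plugin_watcher implementation):

package main

import (
	"log"
	"strings"

	"github.com/fsnotify/fsnotify"
)

func main() {
	w, err := fsnotify.NewWatcher()
	if err != nil {
		log.Fatal(err)
	}
	defer w.Close()
	// Directory where CSI drivers drop their registration sockets
	// (path taken from the log entry above; must exist to run this sketch).
	if err := w.Add("/var/lib/kubelet/plugins_registry"); err != nil {
		log.Fatal(err)
	}
	for ev := range w.Events {
		// A newly created *-reg.sock file means a driver wants to register.
		if ev.Op&fsnotify.Create != 0 && strings.HasSuffix(ev.Name, "-reg.sock") {
			log.Printf("Adding socket path to desired state cache path=%q", ev.Name)
		}
	}
}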
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.323582 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-78h9v"]
Feb 02 22:36:43 crc kubenswrapper[4755]: E0202 22:36:43.323817 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d480358-faea-430f-97ad-c49f7878007b" containerName="collect-profiles"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.323838 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d480358-faea-430f-97ad-c49f7878007b" containerName="collect-profiles"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.323975 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d480358-faea-430f-97ad-c49f7878007b" containerName="collect-profiles"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.324820 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-78h9v"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.345395 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-78h9v"]
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.362023 4755 patch_prober.go:28] interesting pod/router-default-5444994796-2x78p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Feb 02 22:36:43 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld
Feb 02 22:36:43 crc kubenswrapper[4755]: [+]process-running ok
Feb 02 22:36:43 crc kubenswrapper[4755]: healthz check failed
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.362066 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2x78p" podUID="d61538e7-e186-4a77-a96a-9d4d9d514837" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.383375 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n6r9j\" (UniqueName: \"kubernetes.io/projected/2d480358-faea-430f-97ad-c49f7878007b-kube-api-access-n6r9j\") pod \"2d480358-faea-430f-97ad-c49f7878007b\" (UID: \"2d480358-faea-430f-97ad-c49f7878007b\") "
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.383489 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.383537 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2d480358-faea-430f-97ad-c49f7878007b-config-volume\") pod \"2d480358-faea-430f-97ad-c49f7878007b\" (UID: \"2d480358-faea-430f-97ad-c49f7878007b\") "
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.383568 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2d480358-faea-430f-97ad-c49f7878007b-secret-volume\") pod \"2d480358-faea-430f-97ad-c49f7878007b\" (UID: \"2d480358-faea-430f-97ad-c49f7878007b\") "
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.383803 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2025f58f-a8e1-4009-a95b-946aca049871-catalog-content\") pod \"certified-operators-9k7fx\" (UID: \"2025f58f-a8e1-4009-a95b-946aca049871\") " pod="openshift-marketplace/certified-operators-9k7fx"
Feb 02 22:36:43 crc kubenswrapper[4755]: E0202 22:36:43.383881 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:43.883853804 +0000 UTC m=+159.575074130 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.383964 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wk4p\" (UniqueName: \"kubernetes.io/projected/2025f58f-a8e1-4009-a95b-946aca049871-kube-api-access-2wk4p\") pod \"certified-operators-9k7fx\" (UID: \"2025f58f-a8e1-4009-a95b-946aca049871\") " pod="openshift-marketplace/certified-operators-9k7fx"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.383994 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2025f58f-a8e1-4009-a95b-946aca049871-utilities\") pod \"certified-operators-9k7fx\" (UID: \"2025f58f-a8e1-4009-a95b-946aca049871\") " pod="openshift-marketplace/certified-operators-9k7fx"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.384404 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2025f58f-a8e1-4009-a95b-946aca049871-catalog-content\") pod \"certified-operators-9k7fx\" (UID: \"2025f58f-a8e1-4009-a95b-946aca049871\") " pod="openshift-marketplace/certified-operators-9k7fx"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.384445 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2025f58f-a8e1-4009-a95b-946aca049871-utilities\") pod \"certified-operators-9k7fx\" (UID: \"2025f58f-a8e1-4009-a95b-946aca049871\") " pod="openshift-marketplace/certified-operators-9k7fx"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.384671 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d480358-faea-430f-97ad-c49f7878007b-config-volume" (OuterVolumeSpecName: "config-volume") pod "2d480358-faea-430f-97ad-c49f7878007b" (UID: "2d480358-faea-430f-97ad-c49f7878007b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.404318 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d480358-faea-430f-97ad-c49f7878007b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2d480358-faea-430f-97ad-c49f7878007b" (UID: "2d480358-faea-430f-97ad-c49f7878007b"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.404569 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d480358-faea-430f-97ad-c49f7878007b-kube-api-access-n6r9j" (OuterVolumeSpecName: "kube-api-access-n6r9j") pod "2d480358-faea-430f-97ad-c49f7878007b" (UID: "2d480358-faea-430f-97ad-c49f7878007b"). InnerVolumeSpecName "kube-api-access-n6r9j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.409368 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wk4p\" (UniqueName: \"kubernetes.io/projected/2025f58f-a8e1-4009-a95b-946aca049871-kube-api-access-2wk4p\") pod \"certified-operators-9k7fx\" (UID: \"2025f58f-a8e1-4009-a95b-946aca049871\") " pod="openshift-marketplace/certified-operators-9k7fx"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.413626 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5hjgs"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.467290 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9k7fx"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.489064 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.489116 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phqf4\" (UniqueName: \"kubernetes.io/projected/8b90fc36-b80c-4011-926b-b1579c7d0ada-kube-api-access-phqf4\") pod \"community-operators-78h9v\" (UID: \"8b90fc36-b80c-4011-926b-b1579c7d0ada\") " pod="openshift-marketplace/community-operators-78h9v"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.489142 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b90fc36-b80c-4011-926b-b1579c7d0ada-utilities\") pod \"community-operators-78h9v\" (UID: \"8b90fc36-b80c-4011-926b-b1579c7d0ada\") " pod="openshift-marketplace/community-operators-78h9v"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.489247 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b90fc36-b80c-4011-926b-b1579c7d0ada-catalog-content\") pod \"community-operators-78h9v\" (UID: \"8b90fc36-b80c-4011-926b-b1579c7d0ada\") " pod="openshift-marketplace/community-operators-78h9v"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.489298 4755 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2d480358-faea-430f-97ad-c49f7878007b-secret-volume\") on node \"crc\" DevicePath \"\""
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.489312 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n6r9j\" (UniqueName: \"kubernetes.io/projected/2d480358-faea-430f-97ad-c49f7878007b-kube-api-access-n6r9j\") on node \"crc\" DevicePath \"\""
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.489324 4755 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2d480358-faea-430f-97ad-c49f7878007b-config-volume\") on node \"crc\" DevicePath \"\""
Feb 02 22:36:43 crc kubenswrapper[4755]: E0202 22:36:43.490362 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:43.990348719 +0000 UTC m=+159.681569055 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.545835 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-z4gbr"]
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.546757 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z4gbr"]
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.546837 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z4gbr"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.597972 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:43 crc kubenswrapper[4755]: E0202 22:36:43.598500 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:44.09847953 +0000 UTC m=+159.789699856 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.598989 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b90fc36-b80c-4011-926b-b1579c7d0ada-catalog-content\") pod \"community-operators-78h9v\" (UID: \"8b90fc36-b80c-4011-926b-b1579c7d0ada\") " pod="openshift-marketplace/community-operators-78h9v" Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.599132 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.599186 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phqf4\" (UniqueName: \"kubernetes.io/projected/8b90fc36-b80c-4011-926b-b1579c7d0ada-kube-api-access-phqf4\") pod \"community-operators-78h9v\" (UID: \"8b90fc36-b80c-4011-926b-b1579c7d0ada\") " pod="openshift-marketplace/community-operators-78h9v" Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.599217 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b90fc36-b80c-4011-926b-b1579c7d0ada-utilities\") pod \"community-operators-78h9v\" (UID: \"8b90fc36-b80c-4011-926b-b1579c7d0ada\") " pod="openshift-marketplace/community-operators-78h9v" Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.599811 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b90fc36-b80c-4011-926b-b1579c7d0ada-utilities\") pod \"community-operators-78h9v\" (UID: \"8b90fc36-b80c-4011-926b-b1579c7d0ada\") " pod="openshift-marketplace/community-operators-78h9v" Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.600070 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b90fc36-b80c-4011-926b-b1579c7d0ada-catalog-content\") pod \"community-operators-78h9v\" (UID: \"8b90fc36-b80c-4011-926b-b1579c7d0ada\") " pod="openshift-marketplace/community-operators-78h9v" Feb 02 22:36:43 crc kubenswrapper[4755]: E0202 22:36:43.600373 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:44.100355344 +0000 UTC m=+159.791575670 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.645304 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phqf4\" (UniqueName: \"kubernetes.io/projected/8b90fc36-b80c-4011-926b-b1579c7d0ada-kube-api-access-phqf4\") pod \"community-operators-78h9v\" (UID: \"8b90fc36-b80c-4011-926b-b1579c7d0ada\") " pod="openshift-marketplace/community-operators-78h9v" Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.701247 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.701406 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qvwk\" (UniqueName: \"kubernetes.io/projected/26f31363-966f-44cb-8cf1-fc6b071dad2b-kube-api-access-6qvwk\") pod \"certified-operators-z4gbr\" (UID: \"26f31363-966f-44cb-8cf1-fc6b071dad2b\") " pod="openshift-marketplace/certified-operators-z4gbr" Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.701482 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26f31363-966f-44cb-8cf1-fc6b071dad2b-catalog-content\") pod \"certified-operators-z4gbr\" (UID: \"26f31363-966f-44cb-8cf1-fc6b071dad2b\") " pod="openshift-marketplace/certified-operators-z4gbr" Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.701557 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26f31363-966f-44cb-8cf1-fc6b071dad2b-utilities\") pod \"certified-operators-z4gbr\" (UID: \"26f31363-966f-44cb-8cf1-fc6b071dad2b\") " pod="openshift-marketplace/certified-operators-z4gbr" Feb 02 22:36:43 crc kubenswrapper[4755]: E0202 22:36:43.701658 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:44.20164355 +0000 UTC m=+159.892863876 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.722074 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-964gh"] Feb 02 22:36:43 crc kubenswrapper[4755]: W0202 22:36:43.734452 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod83ede4e1_292f_40c0_8e1f_cc44190a0c92.slice/crio-d2da6b577849fcebe94ac394c7b98fb662318620008ef244953017628aa35fe8 WatchSource:0}: Error finding container d2da6b577849fcebe94ac394c7b98fb662318620008ef244953017628aa35fe8: Status 404 returned error can't find the container with id d2da6b577849fcebe94ac394c7b98fb662318620008ef244953017628aa35fe8 Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.797931 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9k7fx"] Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.803058 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.803133 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26f31363-966f-44cb-8cf1-fc6b071dad2b-utilities\") pod \"certified-operators-z4gbr\" (UID: \"26f31363-966f-44cb-8cf1-fc6b071dad2b\") " pod="openshift-marketplace/certified-operators-z4gbr" Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.803160 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qvwk\" (UniqueName: \"kubernetes.io/projected/26f31363-966f-44cb-8cf1-fc6b071dad2b-kube-api-access-6qvwk\") pod \"certified-operators-z4gbr\" (UID: \"26f31363-966f-44cb-8cf1-fc6b071dad2b\") " pod="openshift-marketplace/certified-operators-z4gbr" Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.803231 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26f31363-966f-44cb-8cf1-fc6b071dad2b-catalog-content\") pod \"certified-operators-z4gbr\" (UID: \"26f31363-966f-44cb-8cf1-fc6b071dad2b\") " pod="openshift-marketplace/certified-operators-z4gbr" Feb 02 22:36:43 crc kubenswrapper[4755]: E0202 22:36:43.803403 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:44.303388559 +0000 UTC m=+159.994608885 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.803665 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26f31363-966f-44cb-8cf1-fc6b071dad2b-catalog-content\") pod \"certified-operators-z4gbr\" (UID: \"26f31363-966f-44cb-8cf1-fc6b071dad2b\") " pod="openshift-marketplace/certified-operators-z4gbr"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.803958 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26f31363-966f-44cb-8cf1-fc6b071dad2b-utilities\") pod \"certified-operators-z4gbr\" (UID: \"26f31363-966f-44cb-8cf1-fc6b071dad2b\") " pod="openshift-marketplace/certified-operators-z4gbr"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.822472 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qvwk\" (UniqueName: \"kubernetes.io/projected/26f31363-966f-44cb-8cf1-fc6b071dad2b-kube-api-access-6qvwk\") pod \"certified-operators-z4gbr\" (UID: \"26f31363-966f-44cb-8cf1-fc6b071dad2b\") " pod="openshift-marketplace/certified-operators-z4gbr"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.904140 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 22:36:43 crc kubenswrapper[4755]: E0202 22:36:43.904483 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:44.404465059 +0000 UTC m=+160.095685385 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.918658 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501190-785d5" event={"ID":"2d480358-faea-430f-97ad-c49f7878007b","Type":"ContainerDied","Data":"a24e551c92d643fa9bedcb3f73f4ba5f9db4b924914601ab5ce78f6db0821f71"}
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.918778 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a24e551c92d643fa9bedcb3f73f4ba5f9db4b924914601ab5ce78f6db0821f71"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.918718 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501190-785d5"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.919667 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9k7fx" event={"ID":"2025f58f-a8e1-4009-a95b-946aca049871","Type":"ContainerStarted","Data":"ddaeb369969862a5b15ff86f6f118f6b1cce7863e8263909644e7b6f7c5f133a"}
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.920940 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z4gbr"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.921442 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-964gh" event={"ID":"83ede4e1-292f-40c0-8e1f-cc44190a0c92","Type":"ContainerStarted","Data":"4400763e753b41b7f672fb1b3cf7d232596c6b13075a837d4732685954a8c912"}
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.921470 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-964gh" event={"ID":"83ede4e1-292f-40c0-8e1f-cc44190a0c92","Type":"ContainerStarted","Data":"d2da6b577849fcebe94ac394c7b98fb662318620008ef244953017628aa35fe8"}
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.922779 4755 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.932478 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-gn7kw" event={"ID":"209646e7-0d11-4f45-9222-ca6ac4d92268","Type":"ContainerStarted","Data":"10e13e1fb45c37c1ff62cf291230aa311e2ff81eae3989ebd76dd01dbc0d9e51"}
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.937200 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-78h9v"
Feb 02 22:36:43 crc kubenswrapper[4755]: I0202 22:36:43.984699 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-gn7kw" podStartSLOduration=9.984683223 podStartE2EDuration="9.984683223s" podCreationTimestamp="2026-02-02 22:36:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:43.981544093 +0000 UTC m=+159.672764419" watchObservedRunningTime="2026-02-02 22:36:43.984683223 +0000 UTC m=+159.675903539"
Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.005140 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:36:44 crc kubenswrapper[4755]: E0202 22:36:44.006311 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:44.506299451 +0000 UTC m=+160.197519767 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.031404 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.032017 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.034468 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.034670 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.054463 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.106179 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.106476 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/17447692-74f0-4aa8-a99f-479cfe077754-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"17447692-74f0-4aa8-a99f-479cfe077754\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 22:36:44 crc kubenswrapper[4755]: E0202 22:36:44.106562 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 22:36:44.606534627 +0000 UTC m=+160.297754953 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.106605 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/17447692-74f0-4aa8-a99f-479cfe077754-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"17447692-74f0-4aa8-a99f-479cfe077754\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.207659 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.207747 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/17447692-74f0-4aa8-a99f-479cfe077754-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"17447692-74f0-4aa8-a99f-479cfe077754\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.207776 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/17447692-74f0-4aa8-a99f-479cfe077754-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"17447692-74f0-4aa8-a99f-479cfe077754\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.207868 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/17447692-74f0-4aa8-a99f-479cfe077754-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"17447692-74f0-4aa8-a99f-479cfe077754\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 22:36:44 crc kubenswrapper[4755]: E0202 22:36:44.208096 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 22:36:44.70808453 +0000 UTC m=+160.399304856 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ch6t8" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.222940 4755 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-02-02T22:36:43.242961545Z","Handler":null,"Name":""} Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.235049 4755 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.235110 4755 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.239319 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/17447692-74f0-4aa8-a99f-479cfe077754-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"17447692-74f0-4aa8-a99f-479cfe077754\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.240210 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z4gbr"] Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.267853 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-78h9v"] Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.309932 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.318482 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.354213 4755 patch_prober.go:28] interesting pod/router-default-5444994796-2x78p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 22:36:44 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld Feb 02 22:36:44 crc kubenswrapper[4755]: [+]process-running ok Feb 02 22:36:44 crc kubenswrapper[4755]: healthz check failed Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.354274 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2x78p" podUID="d61538e7-e186-4a77-a96a-9d4d9d514837" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.356929 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.412393 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.415120 4755 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.415171 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.438113 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ch6t8\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.525363 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 02 22:36:44 crc kubenswrapper[4755]: W0202 22:36:44.531696 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod17447692_74f0_4aa8_a99f_479cfe077754.slice/crio-ee1912cd63008ad58044e3ea22070a2d65ed4dd9faa9d56653073b126b8cf72c WatchSource:0}: Error finding container ee1912cd63008ad58044e3ea22070a2d65ed4dd9faa9d56653073b126b8cf72c: Status 404 returned error can't find the container with id ee1912cd63008ad58044e3ea22070a2d65ed4dd9faa9d56653073b126b8cf72c Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.675774 4755 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.856672 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ch6t8"] Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.932139 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-g4db9"] Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.942688 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g4db9"] Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.942981 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g4db9" Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.946153 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.968540 4755 generic.go:334] "Generic (PLEG): container finished" podID="2025f58f-a8e1-4009-a95b-946aca049871" containerID="7af566c722005b267b5f9e76e05151dc64577f6d9ea516fe91f160b3e33893b8" exitCode=0 Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.968612 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9k7fx" event={"ID":"2025f58f-a8e1-4009-a95b-946aca049871","Type":"ContainerDied","Data":"7af566c722005b267b5f9e76e05151dc64577f6d9ea516fe91f160b3e33893b8"} Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.974109 4755 generic.go:334] "Generic (PLEG): container finished" podID="83ede4e1-292f-40c0-8e1f-cc44190a0c92" containerID="4400763e753b41b7f672fb1b3cf7d232596c6b13075a837d4732685954a8c912" exitCode=0 Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.974156 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-964gh" event={"ID":"83ede4e1-292f-40c0-8e1f-cc44190a0c92","Type":"ContainerDied","Data":"4400763e753b41b7f672fb1b3cf7d232596c6b13075a837d4732685954a8c912"} Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.975296 4755 generic.go:334] "Generic (PLEG): container finished" podID="8b90fc36-b80c-4011-926b-b1579c7d0ada" containerID="ac59bda1ebc7ad3154c1a9781f4674ef4a22fd38b94e0b88636ce5bb85ffc547" exitCode=0 Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.975325 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-78h9v" event={"ID":"8b90fc36-b80c-4011-926b-b1579c7d0ada","Type":"ContainerDied","Data":"ac59bda1ebc7ad3154c1a9781f4674ef4a22fd38b94e0b88636ce5bb85ffc547"} Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.975338 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-78h9v" event={"ID":"8b90fc36-b80c-4011-926b-b1579c7d0ada","Type":"ContainerStarted","Data":"3cc194ab3c1e247e1db6d3500c8b3d151069ce1da8f52bd365db339b91bc59f0"} Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.978179 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" event={"ID":"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9","Type":"ContainerStarted","Data":"9147f3a74e215eff869eabd27c19bbe0d62a372dee2bae5c215b17d89425f692"} Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.980005 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"17447692-74f0-4aa8-a99f-479cfe077754","Type":"ContainerStarted","Data":"ee1912cd63008ad58044e3ea22070a2d65ed4dd9faa9d56653073b126b8cf72c"} Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.982436 4755 generic.go:334] "Generic (PLEG): container finished" podID="26f31363-966f-44cb-8cf1-fc6b071dad2b" containerID="bcb492a25ccaadd0d6192df1865d437a68568c43949f0112c9398a4820d020d6" exitCode=0 Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.983214 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z4gbr" event={"ID":"26f31363-966f-44cb-8cf1-fc6b071dad2b","Type":"ContainerDied","Data":"bcb492a25ccaadd0d6192df1865d437a68568c43949f0112c9398a4820d020d6"} Feb 02 22:36:44 crc kubenswrapper[4755]: I0202 22:36:44.983282 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z4gbr" event={"ID":"26f31363-966f-44cb-8cf1-fc6b071dad2b","Type":"ContainerStarted","Data":"99ea86494fcbd867e623ca51d99dd9a20974174ef13bb9e716db0e3a4fbf8ed8"} Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.027618 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/378f0e3d-577f-4e51-a994-411d062c9fba-catalog-content\") pod \"redhat-marketplace-g4db9\" (UID: \"378f0e3d-577f-4e51-a994-411d062c9fba\") " pod="openshift-marketplace/redhat-marketplace-g4db9" Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.027858 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/378f0e3d-577f-4e51-a994-411d062c9fba-utilities\") pod \"redhat-marketplace-g4db9\" (UID: \"378f0e3d-577f-4e51-a994-411d062c9fba\") " pod="openshift-marketplace/redhat-marketplace-g4db9" Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.027951 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdbft\" (UniqueName: \"kubernetes.io/projected/378f0e3d-577f-4e51-a994-411d062c9fba-kube-api-access-pdbft\") pod \"redhat-marketplace-g4db9\" (UID: \"378f0e3d-577f-4e51-a994-411d062c9fba\") " pod="openshift-marketplace/redhat-marketplace-g4db9" Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.078301 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.129636 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/378f0e3d-577f-4e51-a994-411d062c9fba-utilities\") pod \"redhat-marketplace-g4db9\" (UID: \"378f0e3d-577f-4e51-a994-411d062c9fba\") " pod="openshift-marketplace/redhat-marketplace-g4db9" Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.129687 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdbft\" (UniqueName: \"kubernetes.io/projected/378f0e3d-577f-4e51-a994-411d062c9fba-kube-api-access-pdbft\") pod \"redhat-marketplace-g4db9\" (UID: \"378f0e3d-577f-4e51-a994-411d062c9fba\") " pod="openshift-marketplace/redhat-marketplace-g4db9" Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.129713 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/378f0e3d-577f-4e51-a994-411d062c9fba-catalog-content\") pod \"redhat-marketplace-g4db9\" (UID: \"378f0e3d-577f-4e51-a994-411d062c9fba\") " pod="openshift-marketplace/redhat-marketplace-g4db9" Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.130531 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/378f0e3d-577f-4e51-a994-411d062c9fba-utilities\") pod \"redhat-marketplace-g4db9\" (UID: \"378f0e3d-577f-4e51-a994-411d062c9fba\") " pod="openshift-marketplace/redhat-marketplace-g4db9" Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.130616 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/378f0e3d-577f-4e51-a994-411d062c9fba-catalog-content\") pod \"redhat-marketplace-g4db9\" (UID: \"378f0e3d-577f-4e51-a994-411d062c9fba\") " pod="openshift-marketplace/redhat-marketplace-g4db9" Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.157536 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdbft\" (UniqueName: \"kubernetes.io/projected/378f0e3d-577f-4e51-a994-411d062c9fba-kube-api-access-pdbft\") pod \"redhat-marketplace-g4db9\" (UID: \"378f0e3d-577f-4e51-a994-411d062c9fba\") " pod="openshift-marketplace/redhat-marketplace-g4db9" Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.267363 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g4db9" Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.333948 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-n98p2"] Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.335209 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n98p2" Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.353799 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n98p2"] Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.371512 4755 patch_prober.go:28] interesting pod/router-default-5444994796-2x78p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 22:36:45 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld Feb 02 22:36:45 crc kubenswrapper[4755]: [+]process-running ok Feb 02 22:36:45 crc kubenswrapper[4755]: healthz check failed Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.371570 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2x78p" podUID="d61538e7-e186-4a77-a96a-9d4d9d514837" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.440320 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgb26\" (UniqueName: \"kubernetes.io/projected/a4e6bc82-8080-4a17-8c5a-7b20eaec23bc-kube-api-access-kgb26\") pod \"redhat-marketplace-n98p2\" (UID: \"a4e6bc82-8080-4a17-8c5a-7b20eaec23bc\") " pod="openshift-marketplace/redhat-marketplace-n98p2" Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.440388 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4e6bc82-8080-4a17-8c5a-7b20eaec23bc-catalog-content\") pod \"redhat-marketplace-n98p2\" (UID: \"a4e6bc82-8080-4a17-8c5a-7b20eaec23bc\") " pod="openshift-marketplace/redhat-marketplace-n98p2" Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.440491 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4e6bc82-8080-4a17-8c5a-7b20eaec23bc-utilities\") pod \"redhat-marketplace-n98p2\" (UID: \"a4e6bc82-8080-4a17-8c5a-7b20eaec23bc\") " pod="openshift-marketplace/redhat-marketplace-n98p2" Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.541417 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4e6bc82-8080-4a17-8c5a-7b20eaec23bc-catalog-content\") pod \"redhat-marketplace-n98p2\" (UID: \"a4e6bc82-8080-4a17-8c5a-7b20eaec23bc\") " pod="openshift-marketplace/redhat-marketplace-n98p2" Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.541507 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4e6bc82-8080-4a17-8c5a-7b20eaec23bc-utilities\") pod \"redhat-marketplace-n98p2\" (UID: \"a4e6bc82-8080-4a17-8c5a-7b20eaec23bc\") " pod="openshift-marketplace/redhat-marketplace-n98p2" Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.541533 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgb26\" (UniqueName: \"kubernetes.io/projected/a4e6bc82-8080-4a17-8c5a-7b20eaec23bc-kube-api-access-kgb26\") pod \"redhat-marketplace-n98p2\" (UID: \"a4e6bc82-8080-4a17-8c5a-7b20eaec23bc\") " pod="openshift-marketplace/redhat-marketplace-n98p2" Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 
22:36:45.542457 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4e6bc82-8080-4a17-8c5a-7b20eaec23bc-utilities\") pod \"redhat-marketplace-n98p2\" (UID: \"a4e6bc82-8080-4a17-8c5a-7b20eaec23bc\") " pod="openshift-marketplace/redhat-marketplace-n98p2" Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.542467 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4e6bc82-8080-4a17-8c5a-7b20eaec23bc-catalog-content\") pod \"redhat-marketplace-n98p2\" (UID: \"a4e6bc82-8080-4a17-8c5a-7b20eaec23bc\") " pod="openshift-marketplace/redhat-marketplace-n98p2" Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.571517 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgb26\" (UniqueName: \"kubernetes.io/projected/a4e6bc82-8080-4a17-8c5a-7b20eaec23bc-kube-api-access-kgb26\") pod \"redhat-marketplace-n98p2\" (UID: \"a4e6bc82-8080-4a17-8c5a-7b20eaec23bc\") " pod="openshift-marketplace/redhat-marketplace-n98p2" Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.671246 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g4db9"] Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.672896 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n98p2" Feb 02 22:36:45 crc kubenswrapper[4755]: W0202 22:36:45.677027 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod378f0e3d_577f_4e51_a994_411d062c9fba.slice/crio-0dd180a935804a51afc53dee2e40f356608f8c0b156118e5ccca9c6e04bc0543 WatchSource:0}: Error finding container 0dd180a935804a51afc53dee2e40f356608f8c0b156118e5ccca9c6e04bc0543: Status 404 returned error can't find the container with id 0dd180a935804a51afc53dee2e40f356608f8c0b156118e5ccca9c6e04bc0543 Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.896524 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n98p2"] Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.988768 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" event={"ID":"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9","Type":"ContainerStarted","Data":"6573d5df5f700f87f9f5cd4e06ba0f332772de175a985adae854394930d921bf"} Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.989623 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.991688 4755 generic.go:334] "Generic (PLEG): container finished" podID="17447692-74f0-4aa8-a99f-479cfe077754" containerID="93f476b48ab97a0d7989e2ca67897a2085abe2834082cf85b72679da312ec9c8" exitCode=0 Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.991719 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"17447692-74f0-4aa8-a99f-479cfe077754","Type":"ContainerDied","Data":"93f476b48ab97a0d7989e2ca67897a2085abe2834082cf85b72679da312ec9c8"} Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.992841 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n98p2" 
event={"ID":"a4e6bc82-8080-4a17-8c5a-7b20eaec23bc","Type":"ContainerStarted","Data":"0a49b33725e514ed39d67e098b79f507ed3d78429f6463b1e642db0623f84892"} Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.995308 4755 generic.go:334] "Generic (PLEG): container finished" podID="378f0e3d-577f-4e51-a994-411d062c9fba" containerID="dac9dbeb132aa3573b6030831983ebc2a5e95cc337c40c5e321db6c7bc73887e" exitCode=0 Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.995340 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g4db9" event={"ID":"378f0e3d-577f-4e51-a994-411d062c9fba","Type":"ContainerDied","Data":"dac9dbeb132aa3573b6030831983ebc2a5e95cc337c40c5e321db6c7bc73887e"} Feb 02 22:36:45 crc kubenswrapper[4755]: I0202 22:36:45.995355 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g4db9" event={"ID":"378f0e3d-577f-4e51-a994-411d062c9fba","Type":"ContainerStarted","Data":"0dd180a935804a51afc53dee2e40f356608f8c0b156118e5ccca9c6e04bc0543"} Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.010772 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" podStartSLOduration=135.010757302 podStartE2EDuration="2m15.010757302s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:46.008061215 +0000 UTC m=+161.699281541" watchObservedRunningTime="2026-02-02 22:36:46.010757302 +0000 UTC m=+161.701977628" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.033812 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.038015 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-vcjbc" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.089148 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.105067 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-7zl97" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.153783 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4hn7q"] Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.172488 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4hn7q" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.179607 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.187487 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4hn7q"] Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.199670 4755 patch_prober.go:28] interesting pod/downloads-7954f5f757-n987n container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.199745 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-n987n" podUID="a6f21874-ee8f-4718-b2ab-8b4a97543364" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.199992 4755 patch_prober.go:28] interesting pod/downloads-7954f5f757-n987n container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.200021 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-n987n" podUID="a6f21874-ee8f-4718-b2ab-8b4a97543364" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.263027 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e73c4d3-3b24-40d0-af22-fbf37ec4716d-catalog-content\") pod \"redhat-operators-4hn7q\" (UID: \"4e73c4d3-3b24-40d0-af22-fbf37ec4716d\") " pod="openshift-marketplace/redhat-operators-4hn7q" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.263074 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2b6qc\" (UniqueName: \"kubernetes.io/projected/4e73c4d3-3b24-40d0-af22-fbf37ec4716d-kube-api-access-2b6qc\") pod \"redhat-operators-4hn7q\" (UID: \"4e73c4d3-3b24-40d0-af22-fbf37ec4716d\") " pod="openshift-marketplace/redhat-operators-4hn7q" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.263252 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e73c4d3-3b24-40d0-af22-fbf37ec4716d-utilities\") pod \"redhat-operators-4hn7q\" (UID: \"4e73c4d3-3b24-40d0-af22-fbf37ec4716d\") " pod="openshift-marketplace/redhat-operators-4hn7q" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.340831 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-rwshx" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.340863 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-rwshx" Feb 02 22:36:46 crc kubenswrapper[4755]: E0202 22:36:46.341661 4755 cadvisor_stats_provider.go:516] "Partial failure issuing 
cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4e6bc82_8080_4a17_8c5a_7b20eaec23bc.slice/crio-abdd1c5c0420a986a1851f6beb6f16591b07bb0c6a67d9493e93c3352ed7a554.scope\": RecentStats: unable to find data in memory cache]" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.354937 4755 patch_prober.go:28] interesting pod/router-default-5444994796-2x78p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 22:36:46 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld Feb 02 22:36:46 crc kubenswrapper[4755]: [+]process-running ok Feb 02 22:36:46 crc kubenswrapper[4755]: healthz check failed Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.355003 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2x78p" podUID="d61538e7-e186-4a77-a96a-9d4d9d514837" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.364169 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e73c4d3-3b24-40d0-af22-fbf37ec4716d-utilities\") pod \"redhat-operators-4hn7q\" (UID: \"4e73c4d3-3b24-40d0-af22-fbf37ec4716d\") " pod="openshift-marketplace/redhat-operators-4hn7q" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.364269 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e73c4d3-3b24-40d0-af22-fbf37ec4716d-catalog-content\") pod \"redhat-operators-4hn7q\" (UID: \"4e73c4d3-3b24-40d0-af22-fbf37ec4716d\") " pod="openshift-marketplace/redhat-operators-4hn7q" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.364289 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2b6qc\" (UniqueName: \"kubernetes.io/projected/4e73c4d3-3b24-40d0-af22-fbf37ec4716d-kube-api-access-2b6qc\") pod \"redhat-operators-4hn7q\" (UID: \"4e73c4d3-3b24-40d0-af22-fbf37ec4716d\") " pod="openshift-marketplace/redhat-operators-4hn7q" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.365111 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e73c4d3-3b24-40d0-af22-fbf37ec4716d-utilities\") pod \"redhat-operators-4hn7q\" (UID: \"4e73c4d3-3b24-40d0-af22-fbf37ec4716d\") " pod="openshift-marketplace/redhat-operators-4hn7q" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.365178 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e73c4d3-3b24-40d0-af22-fbf37ec4716d-catalog-content\") pod \"redhat-operators-4hn7q\" (UID: \"4e73c4d3-3b24-40d0-af22-fbf37ec4716d\") " pod="openshift-marketplace/redhat-operators-4hn7q" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.368930 4755 patch_prober.go:28] interesting pod/console-f9d7485db-rwshx container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.26:8443/health\": dial tcp 10.217.0.26:8443: connect: connection refused" start-of-body= Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.368970 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-rwshx" 
podUID="0124b915-2ac4-4be7-b356-bf78a8295d9d" containerName="console" probeResult="failure" output="Get \"https://10.217.0.26:8443/health\": dial tcp 10.217.0.26:8443: connect: connection refused" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.385522 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2b6qc\" (UniqueName: \"kubernetes.io/projected/4e73c4d3-3b24-40d0-af22-fbf37ec4716d-kube-api-access-2b6qc\") pod \"redhat-operators-4hn7q\" (UID: \"4e73c4d3-3b24-40d0-af22-fbf37ec4716d\") " pod="openshift-marketplace/redhat-operators-4hn7q" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.483325 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.526888 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qnqzm"] Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.528057 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qnqzm" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.555934 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qnqzm"] Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.582335 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4hn7q" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.671621 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e39e3e7-7d3c-4fe3-bc48-459aecbf1391-utilities\") pod \"redhat-operators-qnqzm\" (UID: \"3e39e3e7-7d3c-4fe3-bc48-459aecbf1391\") " pod="openshift-marketplace/redhat-operators-qnqzm" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.671790 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjrn8\" (UniqueName: \"kubernetes.io/projected/3e39e3e7-7d3c-4fe3-bc48-459aecbf1391-kube-api-access-zjrn8\") pod \"redhat-operators-qnqzm\" (UID: \"3e39e3e7-7d3c-4fe3-bc48-459aecbf1391\") " pod="openshift-marketplace/redhat-operators-qnqzm" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.671821 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e39e3e7-7d3c-4fe3-bc48-459aecbf1391-catalog-content\") pod \"redhat-operators-qnqzm\" (UID: \"3e39e3e7-7d3c-4fe3-bc48-459aecbf1391\") " pod="openshift-marketplace/redhat-operators-qnqzm" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.772628 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjrn8\" (UniqueName: \"kubernetes.io/projected/3e39e3e7-7d3c-4fe3-bc48-459aecbf1391-kube-api-access-zjrn8\") pod \"redhat-operators-qnqzm\" (UID: \"3e39e3e7-7d3c-4fe3-bc48-459aecbf1391\") " pod="openshift-marketplace/redhat-operators-qnqzm" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.772669 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e39e3e7-7d3c-4fe3-bc48-459aecbf1391-catalog-content\") pod \"redhat-operators-qnqzm\" (UID: \"3e39e3e7-7d3c-4fe3-bc48-459aecbf1391\") " pod="openshift-marketplace/redhat-operators-qnqzm" Feb 02 
22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.772692 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e39e3e7-7d3c-4fe3-bc48-459aecbf1391-utilities\") pod \"redhat-operators-qnqzm\" (UID: \"3e39e3e7-7d3c-4fe3-bc48-459aecbf1391\") " pod="openshift-marketplace/redhat-operators-qnqzm" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.773516 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e39e3e7-7d3c-4fe3-bc48-459aecbf1391-utilities\") pod \"redhat-operators-qnqzm\" (UID: \"3e39e3e7-7d3c-4fe3-bc48-459aecbf1391\") " pod="openshift-marketplace/redhat-operators-qnqzm" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.773649 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e39e3e7-7d3c-4fe3-bc48-459aecbf1391-catalog-content\") pod \"redhat-operators-qnqzm\" (UID: \"3e39e3e7-7d3c-4fe3-bc48-459aecbf1391\") " pod="openshift-marketplace/redhat-operators-qnqzm" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.790532 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjrn8\" (UniqueName: \"kubernetes.io/projected/3e39e3e7-7d3c-4fe3-bc48-459aecbf1391-kube-api-access-zjrn8\") pod \"redhat-operators-qnqzm\" (UID: \"3e39e3e7-7d3c-4fe3-bc48-459aecbf1391\") " pod="openshift-marketplace/redhat-operators-qnqzm" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.865007 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qnqzm" Feb 02 22:36:46 crc kubenswrapper[4755]: I0202 22:36:46.891418 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4hn7q"] Feb 02 22:36:46 crc kubenswrapper[4755]: W0202 22:36:46.944918 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4e73c4d3_3b24_40d0_af22_fbf37ec4716d.slice/crio-4da64895c9a0304f263668d1410609e9972235f66c08062307a32661e389a02a WatchSource:0}: Error finding container 4da64895c9a0304f263668d1410609e9972235f66c08062307a32661e389a02a: Status 404 returned error can't find the container with id 4da64895c9a0304f263668d1410609e9972235f66c08062307a32661e389a02a Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.010105 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4hn7q" event={"ID":"4e73c4d3-3b24-40d0-af22-fbf37ec4716d","Type":"ContainerStarted","Data":"4da64895c9a0304f263668d1410609e9972235f66c08062307a32661e389a02a"} Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.027071 4755 generic.go:334] "Generic (PLEG): container finished" podID="a4e6bc82-8080-4a17-8c5a-7b20eaec23bc" containerID="abdd1c5c0420a986a1851f6beb6f16591b07bb0c6a67d9493e93c3352ed7a554" exitCode=0 Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.028444 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n98p2" event={"ID":"a4e6bc82-8080-4a17-8c5a-7b20eaec23bc","Type":"ContainerDied","Data":"abdd1c5c0420a986a1851f6beb6f16591b07bb0c6a67d9493e93c3352ed7a554"} Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.086460 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hg7t4" Feb 02 22:36:47 crc 
kubenswrapper[4755]: I0202 22:36:47.120006 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f"
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.123550 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.128190 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.132784 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.134115 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.139983 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sjx8j"
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.154858 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.284559 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/068be7d3-9a9e-4ec8-bd23-3a8a44b68c06-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"068be7d3-9a9e-4ec8-bd23-3a8a44b68c06\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.284972 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/068be7d3-9a9e-4ec8-bd23-3a8a44b68c06-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"068be7d3-9a9e-4ec8-bd23-3a8a44b68c06\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.314652 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-gv5dm"
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.350293 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-2x78p"
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.353359 4755 patch_prober.go:28] interesting pod/router-default-5444994796-2x78p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Feb 02 22:36:47 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld
Feb 02 22:36:47 crc kubenswrapper[4755]: [+]process-running ok
Feb 02 22:36:47 crc kubenswrapper[4755]: healthz check failed
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.353431 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2x78p" podUID="d61538e7-e186-4a77-a96a-9d4d9d514837" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.386235 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/068be7d3-9a9e-4ec8-bd23-3a8a44b68c06-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"068be7d3-9a9e-4ec8-bd23-3a8a44b68c06\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.386297 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/068be7d3-9a9e-4ec8-bd23-3a8a44b68c06-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"068be7d3-9a9e-4ec8-bd23-3a8a44b68c06\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.386359 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/068be7d3-9a9e-4ec8-bd23-3a8a44b68c06-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"068be7d3-9a9e-4ec8-bd23-3a8a44b68c06\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.409396 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/068be7d3-9a9e-4ec8-bd23-3a8a44b68c06-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"068be7d3-9a9e-4ec8-bd23-3a8a44b68c06\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.459197 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.507057 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.592994 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/17447692-74f0-4aa8-a99f-479cfe077754-kube-api-access\") pod \"17447692-74f0-4aa8-a99f-479cfe077754\" (UID: \"17447692-74f0-4aa8-a99f-479cfe077754\") "
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.593072 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/17447692-74f0-4aa8-a99f-479cfe077754-kubelet-dir\") pod \"17447692-74f0-4aa8-a99f-479cfe077754\" (UID: \"17447692-74f0-4aa8-a99f-479cfe077754\") "
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.593322 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/17447692-74f0-4aa8-a99f-479cfe077754-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "17447692-74f0-4aa8-a99f-479cfe077754" (UID: "17447692-74f0-4aa8-a99f-479cfe077754"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.600591 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17447692-74f0-4aa8-a99f-479cfe077754-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "17447692-74f0-4aa8-a99f-479cfe077754" (UID: "17447692-74f0-4aa8-a99f-479cfe077754"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.651088 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qnqzm"]
Feb 02 22:36:47 crc kubenswrapper[4755]: W0202 22:36:47.666854 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3e39e3e7_7d3c_4fe3_bc48_459aecbf1391.slice/crio-e9debae8c766159b56e58030189378511c77617fc38c5c986733b8548d658f90 WatchSource:0}: Error finding container e9debae8c766159b56e58030189378511c77617fc38c5c986733b8548d658f90: Status 404 returned error can't find the container with id e9debae8c766159b56e58030189378511c77617fc38c5c986733b8548d658f90
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.694220 4755 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/17447692-74f0-4aa8-a99f-479cfe077754-kubelet-dir\") on node \"crc\" DevicePath \"\""
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.694254 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/17447692-74f0-4aa8-a99f-479cfe077754-kube-api-access\") on node \"crc\" DevicePath \"\""
Feb 02 22:36:47 crc kubenswrapper[4755]: I0202 22:36:47.699171 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Feb 02 22:36:47 crc kubenswrapper[4755]: W0202 22:36:47.738314 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod068be7d3_9a9e_4ec8_bd23_3a8a44b68c06.slice/crio-70603c7e564b4f12225075b9fb8001435c610d08e631462c42f0ee50ccb89cda WatchSource:0}: Error finding container 70603c7e564b4f12225075b9fb8001435c610d08e631462c42f0ee50ccb89cda: Status 404 returned error can't find the container with id 70603c7e564b4f12225075b9fb8001435c610d08e631462c42f0ee50ccb89cda
Feb 02 22:36:48 crc kubenswrapper[4755]: I0202 22:36:48.035084 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Feb 02 22:36:48 crc kubenswrapper[4755]: I0202 22:36:48.035076 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"17447692-74f0-4aa8-a99f-479cfe077754","Type":"ContainerDied","Data":"ee1912cd63008ad58044e3ea22070a2d65ed4dd9faa9d56653073b126b8cf72c"}
Feb 02 22:36:48 crc kubenswrapper[4755]: I0202 22:36:48.035668 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ee1912cd63008ad58044e3ea22070a2d65ed4dd9faa9d56653073b126b8cf72c"
Feb 02 22:36:48 crc kubenswrapper[4755]: I0202 22:36:48.037604 4755 generic.go:334] "Generic (PLEG): container finished" podID="4e73c4d3-3b24-40d0-af22-fbf37ec4716d" containerID="f862110e464467891a7975a0b24312c73f15d43d7661d282a379c3ac64168a62" exitCode=0
Feb 02 22:36:48 crc kubenswrapper[4755]: I0202 22:36:48.037683 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4hn7q" event={"ID":"4e73c4d3-3b24-40d0-af22-fbf37ec4716d","Type":"ContainerDied","Data":"f862110e464467891a7975a0b24312c73f15d43d7661d282a379c3ac64168a62"}
Feb 02 22:36:48 crc kubenswrapper[4755]: I0202 22:36:48.039637 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"068be7d3-9a9e-4ec8-bd23-3a8a44b68c06","Type":"ContainerStarted","Data":"70603c7e564b4f12225075b9fb8001435c610d08e631462c42f0ee50ccb89cda"}
Feb 02 22:36:48 crc kubenswrapper[4755]: I0202 22:36:48.041822 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qnqzm" event={"ID":"3e39e3e7-7d3c-4fe3-bc48-459aecbf1391","Type":"ContainerStarted","Data":"e9debae8c766159b56e58030189378511c77617fc38c5c986733b8548d658f90"}
Feb 02 22:36:48 crc kubenswrapper[4755]: I0202 22:36:48.358533 4755 patch_prober.go:28] interesting pod/router-default-5444994796-2x78p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Feb 02 22:36:48 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld
Feb 02 22:36:48 crc kubenswrapper[4755]: [+]process-running ok
Feb 02 22:36:48 crc kubenswrapper[4755]: healthz check failed
Feb 02 22:36:48 crc kubenswrapper[4755]: I0202 22:36:48.358596 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2x78p" podUID="d61538e7-e186-4a77-a96a-9d4d9d514837" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 02 22:36:49 crc kubenswrapper[4755]: I0202 22:36:49.058757 4755 generic.go:334] "Generic (PLEG): container finished" podID="3e39e3e7-7d3c-4fe3-bc48-459aecbf1391" containerID="691eb14585777af7925dadae1d5316a3841cd068112ba72fa2fc5e21a45ef3e8" exitCode=0
Feb 02 22:36:49 crc kubenswrapper[4755]: I0202 22:36:49.058906 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qnqzm" event={"ID":"3e39e3e7-7d3c-4fe3-bc48-459aecbf1391","Type":"ContainerDied","Data":"691eb14585777af7925dadae1d5316a3841cd068112ba72fa2fc5e21a45ef3e8"}
Feb 02 22:36:49 crc kubenswrapper[4755]: I0202 22:36:49.079370 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"068be7d3-9a9e-4ec8-bd23-3a8a44b68c06","Type":"ContainerStarted","Data":"fe47c4652abaeb0f396fdfe988a3c7c686e7fef62fed958c72de77248a2c56d3"}
Feb 02 22:36:49 crc kubenswrapper[4755]: I0202 22:36:49.107521 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.107505163 podStartE2EDuration="2.107505163s" podCreationTimestamp="2026-02-02 22:36:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:36:49.103411246 +0000 UTC m=+164.794631582" watchObservedRunningTime="2026-02-02 22:36:49.107505163 +0000 UTC m=+164.798725489"
Feb 02 22:36:49 crc kubenswrapper[4755]: I0202 22:36:49.161987 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-4dnjt"
Feb 02 22:36:49 crc kubenswrapper[4755]: I0202 22:36:49.352408 4755 patch_prober.go:28] interesting pod/router-default-5444994796-2x78p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Feb 02 22:36:49 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld
Feb 02 22:36:49 crc kubenswrapper[4755]: [+]process-running ok
Feb 02 22:36:49 crc kubenswrapper[4755]: healthz check failed
Feb 02 22:36:49 crc kubenswrapper[4755]: I0202 22:36:49.352454 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2x78p" podUID="d61538e7-e186-4a77-a96a-9d4d9d514837" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 02 22:36:50 crc kubenswrapper[4755]: I0202 22:36:50.082832 4755 generic.go:334] "Generic (PLEG): container finished" podID="068be7d3-9a9e-4ec8-bd23-3a8a44b68c06" containerID="fe47c4652abaeb0f396fdfe988a3c7c686e7fef62fed958c72de77248a2c56d3" exitCode=0
Feb 02 22:36:50 crc kubenswrapper[4755]: I0202 22:36:50.082882 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"068be7d3-9a9e-4ec8-bd23-3a8a44b68c06","Type":"ContainerDied","Data":"fe47c4652abaeb0f396fdfe988a3c7c686e7fef62fed958c72de77248a2c56d3"}
Feb 02 22:36:50 crc kubenswrapper[4755]: I0202 22:36:50.352537 4755 patch_prober.go:28] interesting pod/router-default-5444994796-2x78p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Feb 02 22:36:50 crc kubenswrapper[4755]: [-]has-synced failed: reason withheld
Feb 02 22:36:50 crc kubenswrapper[4755]: [+]process-running ok
Feb 02 22:36:50 crc kubenswrapper[4755]: healthz check failed
Feb 02 22:36:50 crc kubenswrapper[4755]: I0202 22:36:50.352582 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2x78p" podUID="d61538e7-e186-4a77-a96a-9d4d9d514837" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 02 22:36:51 crc kubenswrapper[4755]: I0202 22:36:51.358869 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-2x78p"
Feb 02 22:36:51 crc kubenswrapper[4755]: I0202 22:36:51.364256 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-2x78p"
Feb 02 22:36:51 crc kubenswrapper[4755]: I0202 22:36:51.440582 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Feb 02 22:36:51 crc kubenswrapper[4755]: I0202 22:36:51.481801 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/068be7d3-9a9e-4ec8-bd23-3a8a44b68c06-kubelet-dir\") pod \"068be7d3-9a9e-4ec8-bd23-3a8a44b68c06\" (UID: \"068be7d3-9a9e-4ec8-bd23-3a8a44b68c06\") "
Feb 02 22:36:51 crc kubenswrapper[4755]: I0202 22:36:51.481932 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/068be7d3-9a9e-4ec8-bd23-3a8a44b68c06-kube-api-access\") pod \"068be7d3-9a9e-4ec8-bd23-3a8a44b68c06\" (UID: \"068be7d3-9a9e-4ec8-bd23-3a8a44b68c06\") "
Feb 02 22:36:51 crc kubenswrapper[4755]: I0202 22:36:51.483376 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/068be7d3-9a9e-4ec8-bd23-3a8a44b68c06-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "068be7d3-9a9e-4ec8-bd23-3a8a44b68c06" (UID: "068be7d3-9a9e-4ec8-bd23-3a8a44b68c06"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 02 22:36:51 crc kubenswrapper[4755]: I0202 22:36:51.490253 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/068be7d3-9a9e-4ec8-bd23-3a8a44b68c06-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "068be7d3-9a9e-4ec8-bd23-3a8a44b68c06" (UID: "068be7d3-9a9e-4ec8-bd23-3a8a44b68c06"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:36:51 crc kubenswrapper[4755]: I0202 22:36:51.582986 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/068be7d3-9a9e-4ec8-bd23-3a8a44b68c06-kube-api-access\") on node \"crc\" DevicePath \"\""
Feb 02 22:36:51 crc kubenswrapper[4755]: I0202 22:36:51.583027 4755 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/068be7d3-9a9e-4ec8-bd23-3a8a44b68c06-kubelet-dir\") on node \"crc\" DevicePath \"\""
Feb 02 22:36:52 crc kubenswrapper[4755]: I0202 22:36:52.111121 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Feb 02 22:36:52 crc kubenswrapper[4755]: I0202 22:36:52.111219 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"068be7d3-9a9e-4ec8-bd23-3a8a44b68c06","Type":"ContainerDied","Data":"70603c7e564b4f12225075b9fb8001435c610d08e631462c42f0ee50ccb89cda"}
Feb 02 22:36:52 crc kubenswrapper[4755]: I0202 22:36:52.111255 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="70603c7e564b4f12225075b9fb8001435c610d08e631462c42f0ee50ccb89cda"
Feb 02 22:36:53 crc kubenswrapper[4755]: I0202 22:36:53.389908 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 22:36:53 crc kubenswrapper[4755]: I0202 22:36:53.390270 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 22:36:53 crc kubenswrapper[4755]: I0202 22:36:53.616976 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs\") pod \"network-metrics-daemon-k8tml\" (UID: \"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\") " pod="openshift-multus/network-metrics-daemon-k8tml"
Feb 02 22:36:53 crc kubenswrapper[4755]: I0202 22:36:53.634715 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923-metrics-certs\") pod \"network-metrics-daemon-k8tml\" (UID: \"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923\") " pod="openshift-multus/network-metrics-daemon-k8tml"
Feb 02 22:36:53 crc kubenswrapper[4755]: I0202 22:36:53.789505 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-k8tml"
Feb 02 22:36:56 crc kubenswrapper[4755]: I0202 22:36:56.198065 4755 patch_prober.go:28] interesting pod/downloads-7954f5f757-n987n container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body=
Feb 02 22:36:56 crc kubenswrapper[4755]: I0202 22:36:56.198404 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-n987n" podUID="a6f21874-ee8f-4718-b2ab-8b4a97543364" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused"
Feb 02 22:36:56 crc kubenswrapper[4755]: I0202 22:36:56.198072 4755 patch_prober.go:28] interesting pod/downloads-7954f5f757-n987n container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body=
Feb 02 22:36:56 crc kubenswrapper[4755]: I0202 22:36:56.198490 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-n987n" podUID="a6f21874-ee8f-4718-b2ab-8b4a97543364" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused"
Feb 02 22:36:56 crc kubenswrapper[4755]: I0202 22:36:56.353823 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-rwshx"
Feb 02 22:36:56 crc kubenswrapper[4755]: I0202 22:36:56.363130 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-rwshx"
Feb 02 22:37:00 crc kubenswrapper[4755]: I0202 22:37:00.086555 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lrgth"]
Feb 02 22:37:00 crc kubenswrapper[4755]: I0202 22:37:00.087433 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" podUID="09e605cd-967a-462f-8fad-1cf16ef64351" containerName="controller-manager" containerID="cri-o://bfe4c5c8941e89bdb4dc0a161815c6045571e3820de4985581df364d35a76229" gracePeriod=30
Feb 02 22:37:00 crc kubenswrapper[4755]: I0202 22:37:00.097524 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx"]
Feb 02 22:37:00 crc kubenswrapper[4755]: I0202 22:37:00.097751 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx" podUID="fbc07c99-ae2c-459e-9731-ca524c8bfa08" containerName="route-controller-manager" containerID="cri-o://a151fab0c988d424ed0d48f166e5abd543b8158db33024ca22de1a7eee5d49a7" gracePeriod=30
Feb 02 22:37:01 crc kubenswrapper[4755]: I0202 22:37:01.235545 4755 generic.go:334] "Generic (PLEG): container finished" podID="09e605cd-967a-462f-8fad-1cf16ef64351" containerID="bfe4c5c8941e89bdb4dc0a161815c6045571e3820de4985581df364d35a76229" exitCode=0
Feb 02 22:37:01 crc kubenswrapper[4755]: I0202 22:37:01.235657 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" event={"ID":"09e605cd-967a-462f-8fad-1cf16ef64351","Type":"ContainerDied","Data":"bfe4c5c8941e89bdb4dc0a161815c6045571e3820de4985581df364d35a76229"}
Feb 02 22:37:01 crc kubenswrapper[4755]: I0202 22:37:01.237759 4755 generic.go:334] "Generic (PLEG): container finished" podID="fbc07c99-ae2c-459e-9731-ca524c8bfa08" containerID="a151fab0c988d424ed0d48f166e5abd543b8158db33024ca22de1a7eee5d49a7" exitCode=0
Feb 02 22:37:01 crc kubenswrapper[4755]: I0202 22:37:01.237811 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx" event={"ID":"fbc07c99-ae2c-459e-9731-ca524c8bfa08","Type":"ContainerDied","Data":"a151fab0c988d424ed0d48f166e5abd543b8158db33024ca22de1a7eee5d49a7"}
Feb 02 22:37:04 crc kubenswrapper[4755]: I0202 22:37:04.237318 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 22:37:04 crc kubenswrapper[4755]: I0202 22:37:04.683804 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8"
Feb 02 22:37:06 crc kubenswrapper[4755]: I0202 22:37:06.093338 4755 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-m8cdx container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body=
Feb 02 22:37:06 crc kubenswrapper[4755]: I0202 22:37:06.093425 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx" podUID="fbc07c99-ae2c-459e-9731-ca524c8bfa08" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused"
Feb 02 22:37:06 crc kubenswrapper[4755]: I0202 22:37:06.198556 4755 patch_prober.go:28] interesting pod/downloads-7954f5f757-n987n container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body=
Feb 02 22:37:06 crc kubenswrapper[4755]: I0202 22:37:06.198583 4755 patch_prober.go:28] interesting pod/downloads-7954f5f757-n987n container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body=
Feb 02 22:37:06 crc kubenswrapper[4755]: I0202 22:37:06.198622 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-n987n" podUID="a6f21874-ee8f-4718-b2ab-8b4a97543364" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused"
Feb 02 22:37:06 crc kubenswrapper[4755]: I0202 22:37:06.198633 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-n987n" podUID="a6f21874-ee8f-4718-b2ab-8b4a97543364" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused"
Feb 02 22:37:06 crc kubenswrapper[4755]: I0202 22:37:06.198684 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-n987n"
Feb 02 22:37:06 crc kubenswrapper[4755]: I0202 22:37:06.199127 4755 patch_prober.go:28] interesting pod/downloads-7954f5f757-n987n container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body=
Feb 02 22:37:06 crc kubenswrapper[4755]: I0202 22:37:06.199157 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-n987n" podUID="a6f21874-ee8f-4718-b2ab-8b4a97543364" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused"
Feb 02 22:37:06 crc kubenswrapper[4755]: I0202 22:37:06.199298 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"5822479225fc39e5208520b955aef1a0dcf1de3cf208958e1d35e61bd27eed98"} pod="openshift-console/downloads-7954f5f757-n987n" containerMessage="Container download-server failed liveness probe, will be restarted"
Feb 02 22:37:06 crc kubenswrapper[4755]: I0202 22:37:06.199398 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-n987n" podUID="a6f21874-ee8f-4718-b2ab-8b4a97543364" containerName="download-server" containerID="cri-o://5822479225fc39e5208520b955aef1a0dcf1de3cf208958e1d35e61bd27eed98" gracePeriod=2
Feb 02 22:37:06 crc kubenswrapper[4755]: I0202 22:37:06.480114 4755 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-lrgth container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body=
Feb 02 22:37:06 crc kubenswrapper[4755]: I0202 22:37:06.480175 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" podUID="09e605cd-967a-462f-8fad-1cf16ef64351" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused"
Feb 02 22:37:07 crc kubenswrapper[4755]: I0202 22:37:07.270680 4755 generic.go:334] "Generic (PLEG): container finished" podID="a6f21874-ee8f-4718-b2ab-8b4a97543364" containerID="5822479225fc39e5208520b955aef1a0dcf1de3cf208958e1d35e61bd27eed98" exitCode=0
Feb 02 22:37:07 crc kubenswrapper[4755]: I0202 22:37:07.270721 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-n987n" event={"ID":"a6f21874-ee8f-4718-b2ab-8b4a97543364","Type":"ContainerDied","Data":"5822479225fc39e5208520b955aef1a0dcf1de3cf208958e1d35e61bd27eed98"}
Feb 02 22:37:14 crc kubenswrapper[4755]: E0202 22:37:14.082014 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Feb 02 22:37:14 crc kubenswrapper[4755]: E0202 22:37:14.082644 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6qvwk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-z4gbr_openshift-marketplace(26f31363-966f-44cb-8cf1-fc6b071dad2b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Feb 02 22:37:14 crc kubenswrapper[4755]: E0202 22:37:14.083940 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-z4gbr" podUID="26f31363-966f-44cb-8cf1-fc6b071dad2b"
Feb 02 22:37:14 crc kubenswrapper[4755]: E0202 22:37:14.341890 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Feb 02 22:37:14 crc kubenswrapper[4755]: E0202 22:37:14.342100 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2wk4p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-9k7fx_openshift-marketplace(2025f58f-a8e1-4009-a95b-946aca049871): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Feb 02 22:37:14 crc kubenswrapper[4755]: E0202 22:37:14.343397 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-9k7fx" podUID="2025f58f-a8e1-4009-a95b-946aca049871"
Feb 02 22:37:14 crc kubenswrapper[4755]: E0202 22:37:14.668273 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-z4gbr" podUID="26f31363-966f-44cb-8cf1-fc6b071dad2b"
Feb 02 22:37:14 crc kubenswrapper[4755]: I0202 22:37:14.765314 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth"
Feb 02 22:37:14 crc kubenswrapper[4755]: I0202 22:37:14.823340 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-b4698c5-6l92t"]
Feb 02 22:37:14 crc kubenswrapper[4755]: E0202 22:37:14.823664 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="068be7d3-9a9e-4ec8-bd23-3a8a44b68c06" containerName="pruner"
Feb 02 22:37:14 crc kubenswrapper[4755]: I0202 22:37:14.823681 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="068be7d3-9a9e-4ec8-bd23-3a8a44b68c06" containerName="pruner"
Feb 02 22:37:14 crc kubenswrapper[4755]: E0202 22:37:14.823692 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17447692-74f0-4aa8-a99f-479cfe077754" containerName="pruner"
Feb 02 22:37:14 crc kubenswrapper[4755]: I0202 22:37:14.823700 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="17447692-74f0-4aa8-a99f-479cfe077754" containerName="pruner"
Feb 02 22:37:14 crc kubenswrapper[4755]: E0202 22:37:14.823719 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09e605cd-967a-462f-8fad-1cf16ef64351" containerName="controller-manager"
Feb 02 22:37:14 crc kubenswrapper[4755]: I0202 22:37:14.823748 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="09e605cd-967a-462f-8fad-1cf16ef64351" containerName="controller-manager"
Feb 02 22:37:14 crc kubenswrapper[4755]: I0202 22:37:14.823865 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="09e605cd-967a-462f-8fad-1cf16ef64351" containerName="controller-manager"
Feb 02 22:37:14 crc kubenswrapper[4755]: I0202 22:37:14.823883 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="17447692-74f0-4aa8-a99f-479cfe077754" containerName="pruner"
Feb 02 22:37:14 crc kubenswrapper[4755]: I0202 22:37:14.823900 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="068be7d3-9a9e-4ec8-bd23-3a8a44b68c06" containerName="pruner"
Feb 02 22:37:14 crc kubenswrapper[4755]: I0202 22:37:14.824463 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-b4698c5-6l92t"
Feb 02 22:37:14 crc kubenswrapper[4755]: I0202 22:37:14.827888 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-b4698c5-6l92t"]
Feb 02 22:37:14 crc kubenswrapper[4755]: I0202 22:37:14.919614 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/09e605cd-967a-462f-8fad-1cf16ef64351-proxy-ca-bundles\") pod \"09e605cd-967a-462f-8fad-1cf16ef64351\" (UID: \"09e605cd-967a-462f-8fad-1cf16ef64351\") "
Feb 02 22:37:14 crc kubenswrapper[4755]: I0202 22:37:14.920001 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09e605cd-967a-462f-8fad-1cf16ef64351-config\") pod \"09e605cd-967a-462f-8fad-1cf16ef64351\" (UID: \"09e605cd-967a-462f-8fad-1cf16ef64351\") "
Feb 02 22:37:14 crc kubenswrapper[4755]: I0202 22:37:14.920055 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p8868\" (UniqueName: \"kubernetes.io/projected/09e605cd-967a-462f-8fad-1cf16ef64351-kube-api-access-p8868\") pod \"09e605cd-967a-462f-8fad-1cf16ef64351\" (UID: \"09e605cd-967a-462f-8fad-1cf16ef64351\") "
Feb 02 22:37:14 crc kubenswrapper[4755]: I0202 22:37:14.920075 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09e605cd-967a-462f-8fad-1cf16ef64351-serving-cert\") pod \"09e605cd-967a-462f-8fad-1cf16ef64351\" (UID: \"09e605cd-967a-462f-8fad-1cf16ef64351\") "
Feb 02 22:37:14 crc kubenswrapper[4755]: I0202 22:37:14.920112 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/09e605cd-967a-462f-8fad-1cf16ef64351-client-ca\") pod \"09e605cd-967a-462f-8fad-1cf16ef64351\" (UID: \"09e605cd-967a-462f-8fad-1cf16ef64351\") "
Feb 02 22:37:14 crc kubenswrapper[4755]: I0202 22:37:14.920693 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09e605cd-967a-462f-8fad-1cf16ef64351-client-ca" (OuterVolumeSpecName: "client-ca") pod "09e605cd-967a-462f-8fad-1cf16ef64351" (UID: "09e605cd-967a-462f-8fad-1cf16ef64351"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 22:37:14 crc kubenswrapper[4755]: I0202 22:37:14.920706 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09e605cd-967a-462f-8fad-1cf16ef64351-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "09e605cd-967a-462f-8fad-1cf16ef64351" (UID: "09e605cd-967a-462f-8fad-1cf16ef64351"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 22:37:14 crc kubenswrapper[4755]: I0202 22:37:14.921297 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09e605cd-967a-462f-8fad-1cf16ef64351-config" (OuterVolumeSpecName: "config") pod "09e605cd-967a-462f-8fad-1cf16ef64351" (UID: "09e605cd-967a-462f-8fad-1cf16ef64351"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 22:37:14 crc kubenswrapper[4755]: I0202 22:37:14.927298 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09e605cd-967a-462f-8fad-1cf16ef64351-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09e605cd-967a-462f-8fad-1cf16ef64351" (UID: "09e605cd-967a-462f-8fad-1cf16ef64351"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:37:14 crc kubenswrapper[4755]: I0202 22:37:14.950242 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09e605cd-967a-462f-8fad-1cf16ef64351-kube-api-access-p8868" (OuterVolumeSpecName: "kube-api-access-p8868") pod "09e605cd-967a-462f-8fad-1cf16ef64351" (UID: "09e605cd-967a-462f-8fad-1cf16ef64351"). InnerVolumeSpecName "kube-api-access-p8868". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.021290 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-config\") pod \"controller-manager-b4698c5-6l92t\" (UID: \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\") " pod="openshift-controller-manager/controller-manager-b4698c5-6l92t"
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.021347 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-client-ca\") pod \"controller-manager-b4698c5-6l92t\" (UID: \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\") " pod="openshift-controller-manager/controller-manager-b4698c5-6l92t"
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.021393 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b87mc\" (UniqueName: \"kubernetes.io/projected/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-kube-api-access-b87mc\") pod \"controller-manager-b4698c5-6l92t\" (UID: \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\") " pod="openshift-controller-manager/controller-manager-b4698c5-6l92t"
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.021635 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-proxy-ca-bundles\") pod \"controller-manager-b4698c5-6l92t\" (UID: \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\") " pod="openshift-controller-manager/controller-manager-b4698c5-6l92t"
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.021692 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-serving-cert\") pod \"controller-manager-b4698c5-6l92t\" (UID: \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\") " pod="openshift-controller-manager/controller-manager-b4698c5-6l92t"
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.022008 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09e605cd-967a-462f-8fad-1cf16ef64351-config\") on node \"crc\" DevicePath \"\""
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.022060 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p8868\" (UniqueName: \"kubernetes.io/projected/09e605cd-967a-462f-8fad-1cf16ef64351-kube-api-access-p8868\") on node \"crc\" DevicePath \"\""
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.022081 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09e605cd-967a-462f-8fad-1cf16ef64351-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.022096 4755 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/09e605cd-967a-462f-8fad-1cf16ef64351-client-ca\") on node \"crc\" DevicePath \"\""
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.022112 4755 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/09e605cd-967a-462f-8fad-1cf16ef64351-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.124618 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-serving-cert\") pod \"controller-manager-b4698c5-6l92t\" (UID: \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\") " pod="openshift-controller-manager/controller-manager-b4698c5-6l92t"
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.124908 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-config\") pod \"controller-manager-b4698c5-6l92t\" (UID: \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\") " pod="openshift-controller-manager/controller-manager-b4698c5-6l92t"
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.125029 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-client-ca\") pod \"controller-manager-b4698c5-6l92t\" (UID: \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\") " pod="openshift-controller-manager/controller-manager-b4698c5-6l92t"
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.125227 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b87mc\" (UniqueName: \"kubernetes.io/projected/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-kube-api-access-b87mc\") pod \"controller-manager-b4698c5-6l92t\" (UID: \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\") " pod="openshift-controller-manager/controller-manager-b4698c5-6l92t"
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.125508 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-proxy-ca-bundles\") pod \"controller-manager-b4698c5-6l92t\" (UID: \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\") " pod="openshift-controller-manager/controller-manager-b4698c5-6l92t"
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.127061 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-config\") pod \"controller-manager-b4698c5-6l92t\" (UID: \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\") " pod="openshift-controller-manager/controller-manager-b4698c5-6l92t"
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.134479 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-serving-cert\") pod \"controller-manager-b4698c5-6l92t\" (UID: \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\") " pod="openshift-controller-manager/controller-manager-b4698c5-6l92t"
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.153921 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b87mc\" (UniqueName: \"kubernetes.io/projected/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-kube-api-access-b87mc\") pod \"controller-manager-b4698c5-6l92t\" (UID: \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\") " pod="openshift-controller-manager/controller-manager-b4698c5-6l92t"
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.157937 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-k8tml"]
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.198572 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-proxy-ca-bundles\") pod \"controller-manager-b4698c5-6l92t\" (UID: \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\") " pod="openshift-controller-manager/controller-manager-b4698c5-6l92t"
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.200076 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-client-ca\") pod \"controller-manager-b4698c5-6l92t\" (UID: \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\") " pod="openshift-controller-manager/controller-manager-b4698c5-6l92t"
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.329648 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth" event={"ID":"09e605cd-967a-462f-8fad-1cf16ef64351","Type":"ContainerDied","Data":"13b3776ea94e86b139d8572241e9bdb06828725cae1683fa3970ebfd79a30e83"}
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.329684 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-lrgth"
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.329769 4755 scope.go:117] "RemoveContainer" containerID="bfe4c5c8941e89bdb4dc0a161815c6045571e3820de4985581df364d35a76229"
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.381284 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lrgth"]
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.386626 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lrgth"]
Feb 02 22:37:15 crc kubenswrapper[4755]: I0202 22:37:15.467656 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-b4698c5-6l92t"
Feb 02 22:37:16 crc kubenswrapper[4755]: I0202 22:37:16.198662 4755 patch_prober.go:28] interesting pod/downloads-7954f5f757-n987n container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body=
Feb 02 22:37:16 crc kubenswrapper[4755]: I0202 22:37:16.198760 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-n987n" podUID="a6f21874-ee8f-4718-b2ab-8b4a97543364" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused"
Feb 02 22:37:17 crc kubenswrapper[4755]: I0202 22:37:17.016572 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kc7j8"
Feb 02 22:37:17 crc kubenswrapper[4755]: I0202 22:37:17.084073 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09e605cd-967a-462f-8fad-1cf16ef64351" path="/var/lib/kubelet/pods/09e605cd-967a-462f-8fad-1cf16ef64351/volumes"
Feb 02 22:37:17 crc kubenswrapper[4755]: I0202 22:37:17.093933 4755 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-m8cdx container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Feb 02 22:37:17 crc kubenswrapper[4755]: I0202 22:37:17.094030 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx" podUID="fbc07c99-ae2c-459e-9731-ca524c8bfa08" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Feb 02 22:37:19 crc kubenswrapper[4755]: E0202 22:37:19.066080 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-9k7fx" podUID="2025f58f-a8e1-4009-a95b-946aca049871"
Feb 02 22:37:19 crc kubenswrapper[4755]: E0202 22:37:19.137317 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Feb 02 22:37:19 crc kubenswrapper[4755]: E0202 22:37:19.137549 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pdbft,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-g4db9_openshift-marketplace(378f0e3d-577f-4e51-a994-411d062c9fba): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Feb 02 22:37:19 crc kubenswrapper[4755]: E0202 22:37:19.138993 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-g4db9" podUID="378f0e3d-577f-4e51-a994-411d062c9fba"
Feb 02 22:37:20 crc kubenswrapper[4755]: E0202 22:37:20.017543 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Feb 02 22:37:20 crc kubenswrapper[4755]: E0202 22:37:20.017758 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-phqf4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-78h9v_openshift-marketplace(8b90fc36-b80c-4011-926b-b1579c7d0ada): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Feb 02 22:37:20 crc kubenswrapper[4755]: E0202 22:37:20.019587 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-78h9v" podUID="8b90fc36-b80c-4011-926b-b1579c7d0ada"
Feb 02 22:37:20 crc kubenswrapper[4755]: I0202 22:37:20.023784 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-b4698c5-6l92t"]
Feb 02 22:37:21 crc kubenswrapper[4755]: E0202 22:37:21.000448 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Feb 02 22:37:21 crc kubenswrapper[4755]: E0202 22:37:21.000828 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qpn7j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-964gh_openshift-marketplace(83ede4e1-292f-40c0-8e1f-cc44190a0c92): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Feb 02 22:37:21 crc kubenswrapper[4755]: E0202 22:37:21.002675 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-964gh" podUID="83ede4e1-292f-40c0-8e1f-cc44190a0c92"
Feb 02 22:37:23 crc kubenswrapper[4755]: E0202 22:37:23.293143 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-964gh" podUID="83ede4e1-292f-40c0-8e1f-cc44190a0c92"
Feb 02 22:37:23 crc kubenswrapper[4755]: W0202 22:37:23.293296 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3d013a2d_6ad7_40d5_b4f7_0f4b4eacc923.slice/crio-ee4a0af48e9dc8f4e6d3d56de65e5d9749df2df144d2a6aac078605552975fae WatchSource:0}: Error finding container ee4a0af48e9dc8f4e6d3d56de65e5d9749df2df144d2a6aac078605552975fae: Status 404 returned error can't find the container with id ee4a0af48e9dc8f4e6d3d56de65e5d9749df2df144d2a6aac078605552975fae
Feb 02 22:37:23 crc kubenswrapper[4755]: E0202 22:37:23.304298 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-78h9v" podUID="8b90fc36-b80c-4011-926b-b1579c7d0ada"
Feb 02 22:37:23 crc kubenswrapper[4755]: E0202 22:37:23.304968 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-g4db9" podUID="378f0e3d-577f-4e51-a994-411d062c9fba"
Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.320246 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.321104 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.323500 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.330839 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.333661 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.365242 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/95a09db3-3fad-4c40-88da-5015d8f0468b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"95a09db3-3fad-4c40-88da-5015d8f0468b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.365825 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/95a09db3-3fad-4c40-88da-5015d8f0468b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"95a09db3-3fad-4c40-88da-5015d8f0468b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.382043 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-k8tml" event={"ID":"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923","Type":"ContainerStarted","Data":"ee4a0af48e9dc8f4e6d3d56de65e5d9749df2df144d2a6aac078605552975fae"}
Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.384933 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx" event={"ID":"fbc07c99-ae2c-459e-9731-ca524c8bfa08","Type":"ContainerDied","Data":"37936b9f7ff47dcc3ccf4440660de8629720b7ccd6c8f088f44de3293f748ea6"}
Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.385036 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="37936b9f7ff47dcc3ccf4440660de8629720b7ccd6c8f088f44de3293f748ea6"
Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.389151 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.389193 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.427555 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx"
Feb 02 22:37:23 crc kubenswrapper[4755]: E0202 22:37:23.429088 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Feb 02 22:37:23 crc kubenswrapper[4755]: E0202 22:37:23.430225 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2b6qc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-4hn7q_openshift-marketplace(4e73c4d3-3b24-40d0-af22-fbf37ec4716d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Feb 02 22:37:23 crc kubenswrapper[4755]: E0202 22:37:23.432132 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-4hn7q" podUID="4e73c4d3-3b24-40d0-af22-fbf37ec4716d"
Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.471436 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbc07c99-ae2c-459e-9731-ca524c8bfa08-config\") pod \"fbc07c99-ae2c-459e-9731-ca524c8bfa08\" (UID: \"fbc07c99-ae2c-459e-9731-ca524c8bfa08\") "
Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.471517 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbc07c99-ae2c-459e-9731-ca524c8bfa08-serving-cert\") pod \"fbc07c99-ae2c-459e-9731-ca524c8bfa08\" (UID: \"fbc07c99-ae2c-459e-9731-ca524c8bfa08\") "
Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 
22:37:23.471587 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-287x9\" (UniqueName: \"kubernetes.io/projected/fbc07c99-ae2c-459e-9731-ca524c8bfa08-kube-api-access-287x9\") pod \"fbc07c99-ae2c-459e-9731-ca524c8bfa08\" (UID: \"fbc07c99-ae2c-459e-9731-ca524c8bfa08\") " Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.471821 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fbc07c99-ae2c-459e-9731-ca524c8bfa08-client-ca\") pod \"fbc07c99-ae2c-459e-9731-ca524c8bfa08\" (UID: \"fbc07c99-ae2c-459e-9731-ca524c8bfa08\") " Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.472126 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/95a09db3-3fad-4c40-88da-5015d8f0468b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"95a09db3-3fad-4c40-88da-5015d8f0468b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.472185 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/95a09db3-3fad-4c40-88da-5015d8f0468b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"95a09db3-3fad-4c40-88da-5015d8f0468b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.472266 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/95a09db3-3fad-4c40-88da-5015d8f0468b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"95a09db3-3fad-4c40-88da-5015d8f0468b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.474220 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbc07c99-ae2c-459e-9731-ca524c8bfa08-client-ca" (OuterVolumeSpecName: "client-ca") pod "fbc07c99-ae2c-459e-9731-ca524c8bfa08" (UID: "fbc07c99-ae2c-459e-9731-ca524c8bfa08"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.475234 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbc07c99-ae2c-459e-9731-ca524c8bfa08-config" (OuterVolumeSpecName: "config") pod "fbc07c99-ae2c-459e-9731-ca524c8bfa08" (UID: "fbc07c99-ae2c-459e-9731-ca524c8bfa08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.489115 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbc07c99-ae2c-459e-9731-ca524c8bfa08-kube-api-access-287x9" (OuterVolumeSpecName: "kube-api-access-287x9") pod "fbc07c99-ae2c-459e-9731-ca524c8bfa08" (UID: "fbc07c99-ae2c-459e-9731-ca524c8bfa08"). InnerVolumeSpecName "kube-api-access-287x9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.490145 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbc07c99-ae2c-459e-9731-ca524c8bfa08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "fbc07c99-ae2c-459e-9731-ca524c8bfa08" (UID: "fbc07c99-ae2c-459e-9731-ca524c8bfa08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.504429 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67"] Feb 02 22:37:23 crc kubenswrapper[4755]: E0202 22:37:23.504791 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbc07c99-ae2c-459e-9731-ca524c8bfa08" containerName="route-controller-manager" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.504811 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbc07c99-ae2c-459e-9731-ca524c8bfa08" containerName="route-controller-manager" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.504946 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbc07c99-ae2c-459e-9731-ca524c8bfa08" containerName="route-controller-manager" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.505532 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.509612 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/95a09db3-3fad-4c40-88da-5015d8f0468b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"95a09db3-3fad-4c40-88da-5015d8f0468b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.513017 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67"] Feb 02 22:37:23 crc kubenswrapper[4755]: E0202 22:37:23.528064 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Feb 02 22:37:23 crc kubenswrapper[4755]: E0202 22:37:23.528238 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kgb26,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-n98p2_openshift-marketplace(a4e6bc82-8080-4a17-8c5a-7b20eaec23bc): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 22:37:23 crc kubenswrapper[4755]: E0202 22:37:23.529554 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-n98p2" podUID="a4e6bc82-8080-4a17-8c5a-7b20eaec23bc" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.574158 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cf88b16a-a2b9-4647-8187-af9d05a1a872-client-ca\") pod \"route-controller-manager-5477c7c5b-mgn67\" (UID: \"cf88b16a-a2b9-4647-8187-af9d05a1a872\") " pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.574240 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf88b16a-a2b9-4647-8187-af9d05a1a872-serving-cert\") pod \"route-controller-manager-5477c7c5b-mgn67\" (UID: \"cf88b16a-a2b9-4647-8187-af9d05a1a872\") " pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.574297 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5jzt\" (UniqueName: \"kubernetes.io/projected/cf88b16a-a2b9-4647-8187-af9d05a1a872-kube-api-access-x5jzt\") pod \"route-controller-manager-5477c7c5b-mgn67\" (UID: \"cf88b16a-a2b9-4647-8187-af9d05a1a872\") " pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.574418 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/cf88b16a-a2b9-4647-8187-af9d05a1a872-config\") pod \"route-controller-manager-5477c7c5b-mgn67\" (UID: \"cf88b16a-a2b9-4647-8187-af9d05a1a872\") " pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.574480 4755 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fbc07c99-ae2c-459e-9731-ca524c8bfa08-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.574492 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbc07c99-ae2c-459e-9731-ca524c8bfa08-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.574502 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fbc07c99-ae2c-459e-9731-ca524c8bfa08-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.574513 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-287x9\" (UniqueName: \"kubernetes.io/projected/fbc07c99-ae2c-459e-9731-ca524c8bfa08-kube-api-access-287x9\") on node \"crc\" DevicePath \"\"" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.675211 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5jzt\" (UniqueName: \"kubernetes.io/projected/cf88b16a-a2b9-4647-8187-af9d05a1a872-kube-api-access-x5jzt\") pod \"route-controller-manager-5477c7c5b-mgn67\" (UID: \"cf88b16a-a2b9-4647-8187-af9d05a1a872\") " pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.675344 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf88b16a-a2b9-4647-8187-af9d05a1a872-config\") pod \"route-controller-manager-5477c7c5b-mgn67\" (UID: \"cf88b16a-a2b9-4647-8187-af9d05a1a872\") " pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.675387 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cf88b16a-a2b9-4647-8187-af9d05a1a872-client-ca\") pod \"route-controller-manager-5477c7c5b-mgn67\" (UID: \"cf88b16a-a2b9-4647-8187-af9d05a1a872\") " pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.675407 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf88b16a-a2b9-4647-8187-af9d05a1a872-serving-cert\") pod \"route-controller-manager-5477c7c5b-mgn67\" (UID: \"cf88b16a-a2b9-4647-8187-af9d05a1a872\") " pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.677369 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cf88b16a-a2b9-4647-8187-af9d05a1a872-client-ca\") pod \"route-controller-manager-5477c7c5b-mgn67\" (UID: \"cf88b16a-a2b9-4647-8187-af9d05a1a872\") " pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.677723 4755 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf88b16a-a2b9-4647-8187-af9d05a1a872-config\") pod \"route-controller-manager-5477c7c5b-mgn67\" (UID: \"cf88b16a-a2b9-4647-8187-af9d05a1a872\") " pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.681957 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf88b16a-a2b9-4647-8187-af9d05a1a872-serving-cert\") pod \"route-controller-manager-5477c7c5b-mgn67\" (UID: \"cf88b16a-a2b9-4647-8187-af9d05a1a872\") " pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.706210 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5jzt\" (UniqueName: \"kubernetes.io/projected/cf88b16a-a2b9-4647-8187-af9d05a1a872-kube-api-access-x5jzt\") pod \"route-controller-manager-5477c7c5b-mgn67\" (UID: \"cf88b16a-a2b9-4647-8187-af9d05a1a872\") " pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.768334 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.783488 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-b4698c5-6l92t"] Feb 02 22:37:23 crc kubenswrapper[4755]: W0202 22:37:23.797470 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4c2531c3_95a3_40e1_9c2a_9ba6e87bb76f.slice/crio-a91c000dce566d837c4739262ec68f2b6c97a375ee9273691f468ed6456ccfc5 WatchSource:0}: Error finding container a91c000dce566d837c4739262ec68f2b6c97a375ee9273691f468ed6456ccfc5: Status 404 returned error can't find the container with id a91c000dce566d837c4739262ec68f2b6c97a375ee9273691f468ed6456ccfc5 Feb 02 22:37:23 crc kubenswrapper[4755]: I0202 22:37:23.828829 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" Feb 02 22:37:24 crc kubenswrapper[4755]: I0202 22:37:24.003798 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 02 22:37:24 crc kubenswrapper[4755]: W0202 22:37:24.010887 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod95a09db3_3fad_4c40_88da_5015d8f0468b.slice/crio-13634a504d810e29f87a0680a92f747211c85ab0b04a36c19bbda182de3c54bd WatchSource:0}: Error finding container 13634a504d810e29f87a0680a92f747211c85ab0b04a36c19bbda182de3c54bd: Status 404 returned error can't find the container with id 13634a504d810e29f87a0680a92f747211c85ab0b04a36c19bbda182de3c54bd Feb 02 22:37:24 crc kubenswrapper[4755]: I0202 22:37:24.060345 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67"] Feb 02 22:37:24 crc kubenswrapper[4755]: W0202 22:37:24.070686 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf88b16a_a2b9_4647_8187_af9d05a1a872.slice/crio-9f3d3e0f9aade1078b014b5e75a84489a799d48b68553def8ed9d026d3533239 WatchSource:0}: Error finding container 9f3d3e0f9aade1078b014b5e75a84489a799d48b68553def8ed9d026d3533239: Status 404 returned error can't find the container with id 9f3d3e0f9aade1078b014b5e75a84489a799d48b68553def8ed9d026d3533239 Feb 02 22:37:24 crc kubenswrapper[4755]: I0202 22:37:24.408378 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-n987n" event={"ID":"a6f21874-ee8f-4718-b2ab-8b4a97543364","Type":"ContainerStarted","Data":"6790c76ef0968895076f47fbb2c67d52206047985e472fa10c3fc4d811fe5837"} Feb 02 22:37:24 crc kubenswrapper[4755]: I0202 22:37:24.410553 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-b4698c5-6l92t" event={"ID":"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f","Type":"ContainerStarted","Data":"a91c000dce566d837c4739262ec68f2b6c97a375ee9273691f468ed6456ccfc5"} Feb 02 22:37:24 crc kubenswrapper[4755]: I0202 22:37:24.411992 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" event={"ID":"cf88b16a-a2b9-4647-8187-af9d05a1a872","Type":"ContainerStarted","Data":"9f3d3e0f9aade1078b014b5e75a84489a799d48b68553def8ed9d026d3533239"} Feb 02 22:37:24 crc kubenswrapper[4755]: I0202 22:37:24.413843 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"95a09db3-3fad-4c40-88da-5015d8f0468b","Type":"ContainerStarted","Data":"13634a504d810e29f87a0680a92f747211c85ab0b04a36c19bbda182de3c54bd"} Feb 02 22:37:24 crc kubenswrapper[4755]: I0202 22:37:24.414496 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx" Feb 02 22:37:24 crc kubenswrapper[4755]: E0202 22:37:24.414910 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-4hn7q" podUID="4e73c4d3-3b24-40d0-af22-fbf37ec4716d" Feb 02 22:37:24 crc kubenswrapper[4755]: E0202 22:37:24.436900 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-n98p2" podUID="a4e6bc82-8080-4a17-8c5a-7b20eaec23bc" Feb 02 22:37:24 crc kubenswrapper[4755]: I0202 22:37:24.484427 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx"] Feb 02 22:37:24 crc kubenswrapper[4755]: I0202 22:37:24.488744 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-m8cdx"] Feb 02 22:37:24 crc kubenswrapper[4755]: E0202 22:37:24.550249 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Feb 02 22:37:24 crc kubenswrapper[4755]: E0202 22:37:24.550388 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zjrn8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-qnqzm_openshift-marketplace(3e39e3e7-7d3c-4fe3-bc48-459aecbf1391): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 22:37:24 crc kubenswrapper[4755]: E0202 
22:37:24.551533 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-qnqzm" podUID="3e39e3e7-7d3c-4fe3-bc48-459aecbf1391" Feb 02 22:37:25 crc kubenswrapper[4755]: I0202 22:37:25.079840 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbc07c99-ae2c-459e-9731-ca524c8bfa08" path="/var/lib/kubelet/pods/fbc07c99-ae2c-459e-9731-ca524c8bfa08/volumes" Feb 02 22:37:25 crc kubenswrapper[4755]: I0202 22:37:25.420196 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"95a09db3-3fad-4c40-88da-5015d8f0468b","Type":"ContainerStarted","Data":"f0b1b8b6e150648dcdf936239aa95a415c45b6c21afe9680331843d09f3f4c85"} Feb 02 22:37:25 crc kubenswrapper[4755]: I0202 22:37:25.423665 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-k8tml" event={"ID":"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923","Type":"ContainerStarted","Data":"b91bf9da1b7706e19b4dda9b9d932a3de1682b35c67e1c5b6427a6e4a3ee133c"} Feb 02 22:37:25 crc kubenswrapper[4755]: I0202 22:37:25.423699 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-k8tml" event={"ID":"3d013a2d-6ad7-40d5-b4f7-0f4b4eacc923","Type":"ContainerStarted","Data":"d68e4ccf7705846fe1ae8f5a4a884f5d0f8dc0f61df68828d0c40744a32e59a8"} Feb 02 22:37:25 crc kubenswrapper[4755]: I0202 22:37:25.425664 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-b4698c5-6l92t" event={"ID":"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f","Type":"ContainerStarted","Data":"7eb9699c3b1f2000c8d07b6d8ec4349f55aa58fa69b61f0b0b923fdf5f125f22"} Feb 02 22:37:25 crc kubenswrapper[4755]: I0202 22:37:25.426205 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-b4698c5-6l92t" Feb 02 22:37:25 crc kubenswrapper[4755]: I0202 22:37:25.426509 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-b4698c5-6l92t" podUID="4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f" containerName="controller-manager" containerID="cri-o://7eb9699c3b1f2000c8d07b6d8ec4349f55aa58fa69b61f0b0b923fdf5f125f22" gracePeriod=30 Feb 02 22:37:25 crc kubenswrapper[4755]: I0202 22:37:25.429438 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" event={"ID":"cf88b16a-a2b9-4647-8187-af9d05a1a872","Type":"ContainerStarted","Data":"1b5b30cbde928c548a79b95033bb81e8c91565fc6452a0db28ea2f407ec990cf"} Feb 02 22:37:25 crc kubenswrapper[4755]: I0202 22:37:25.430422 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" Feb 02 22:37:25 crc kubenswrapper[4755]: I0202 22:37:25.430982 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-n987n" Feb 02 22:37:25 crc kubenswrapper[4755]: I0202 22:37:25.432469 4755 patch_prober.go:28] interesting pod/downloads-7954f5f757-n987n container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: 
connection refused" start-of-body= Feb 02 22:37:25 crc kubenswrapper[4755]: I0202 22:37:25.432551 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-n987n" podUID="a6f21874-ee8f-4718-b2ab-8b4a97543364" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Feb 02 22:37:25 crc kubenswrapper[4755]: E0202 22:37:25.433053 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-qnqzm" podUID="3e39e3e7-7d3c-4fe3-bc48-459aecbf1391" Feb 02 22:37:25 crc kubenswrapper[4755]: I0202 22:37:25.433265 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-b4698c5-6l92t" Feb 02 22:37:25 crc kubenswrapper[4755]: I0202 22:37:25.437546 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" Feb 02 22:37:25 crc kubenswrapper[4755]: I0202 22:37:25.472780 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=2.47275736 podStartE2EDuration="2.47275736s" podCreationTimestamp="2026-02-02 22:37:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:37:25.450138584 +0000 UTC m=+201.141358930" watchObservedRunningTime="2026-02-02 22:37:25.47275736 +0000 UTC m=+201.163977696" Feb 02 22:37:25 crc kubenswrapper[4755]: I0202 22:37:25.497089 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-k8tml" podStartSLOduration=174.497068574 podStartE2EDuration="2m54.497068574s" podCreationTimestamp="2026-02-02 22:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:37:25.471985608 +0000 UTC m=+201.163205984" watchObservedRunningTime="2026-02-02 22:37:25.497068574 +0000 UTC m=+201.188288910" Feb 02 22:37:25 crc kubenswrapper[4755]: I0202 22:37:25.523380 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-b4698c5-6l92t" podStartSLOduration=25.523353675 podStartE2EDuration="25.523353675s" podCreationTimestamp="2026-02-02 22:37:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:37:25.522289374 +0000 UTC m=+201.213509750" watchObservedRunningTime="2026-02-02 22:37:25.523353675 +0000 UTC m=+201.214574041" Feb 02 22:37:25 crc kubenswrapper[4755]: I0202 22:37:25.572943 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" podStartSLOduration=5.57291987 podStartE2EDuration="5.57291987s" podCreationTimestamp="2026-02-02 22:37:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:37:25.570071118 +0000 UTC m=+201.261291454" watchObservedRunningTime="2026-02-02 22:37:25.57291987 +0000 UTC 
m=+201.264140206" Feb 02 22:37:26 crc kubenswrapper[4755]: I0202 22:37:26.198947 4755 patch_prober.go:28] interesting pod/downloads-7954f5f757-n987n container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Feb 02 22:37:26 crc kubenswrapper[4755]: I0202 22:37:26.198947 4755 patch_prober.go:28] interesting pod/downloads-7954f5f757-n987n container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Feb 02 22:37:26 crc kubenswrapper[4755]: I0202 22:37:26.199006 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-n987n" podUID="a6f21874-ee8f-4718-b2ab-8b4a97543364" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Feb 02 22:37:26 crc kubenswrapper[4755]: I0202 22:37:26.199018 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-n987n" podUID="a6f21874-ee8f-4718-b2ab-8b4a97543364" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Feb 02 22:37:26 crc kubenswrapper[4755]: I0202 22:37:26.379439 4755 patch_prober.go:28] interesting pod/controller-manager-b4698c5-6l92t container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.54:8443/healthz\": read tcp 10.217.0.2:54852->10.217.0.54:8443: read: connection reset by peer" start-of-body= Feb 02 22:37:26 crc kubenswrapper[4755]: I0202 22:37:26.379529 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-b4698c5-6l92t" podUID="4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.54:8443/healthz\": read tcp 10.217.0.2:54852->10.217.0.54:8443: read: connection reset by peer" Feb 02 22:37:26 crc kubenswrapper[4755]: I0202 22:37:26.436132 4755 patch_prober.go:28] interesting pod/downloads-7954f5f757-n987n container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Feb 02 22:37:26 crc kubenswrapper[4755]: I0202 22:37:26.436219 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-n987n" podUID="a6f21874-ee8f-4718-b2ab-8b4a97543364" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.011447 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-b4698c5-6l92t" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.036456 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-client-ca\") pod \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\" (UID: \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\") " Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.036633 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-serving-cert\") pod \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\" (UID: \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\") " Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.040247 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-client-ca" (OuterVolumeSpecName: "client-ca") pod "4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f" (UID: "4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.042971 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-config\") pod \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\" (UID: \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\") " Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.043040 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-proxy-ca-bundles\") pod \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\" (UID: \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\") " Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.043122 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b87mc\" (UniqueName: \"kubernetes.io/projected/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-kube-api-access-b87mc\") pod \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\" (UID: \"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f\") " Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.043659 4755 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.043806 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f" (UID: "4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.044114 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-config" (OuterVolumeSpecName: "config") pod "4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f" (UID: "4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.045344 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f" (UID: "4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.053601 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5b9787fb89-npsks"] Feb 02 22:37:27 crc kubenswrapper[4755]: E0202 22:37:27.054174 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f" containerName="controller-manager" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.054278 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f" containerName="controller-manager" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.054497 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f" containerName="controller-manager" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.055048 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.059173 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-kube-api-access-b87mc" (OuterVolumeSpecName: "kube-api-access-b87mc") pod "4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f" (UID: "4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f"). InnerVolumeSpecName "kube-api-access-b87mc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.065586 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5b9787fb89-npsks"] Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.144792 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46d93868-8f7e-44c2-905d-17c8570b9213-config\") pod \"controller-manager-5b9787fb89-npsks\" (UID: \"46d93868-8f7e-44c2-905d-17c8570b9213\") " pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.144942 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/46d93868-8f7e-44c2-905d-17c8570b9213-client-ca\") pod \"controller-manager-5b9787fb89-npsks\" (UID: \"46d93868-8f7e-44c2-905d-17c8570b9213\") " pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.144992 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/46d93868-8f7e-44c2-905d-17c8570b9213-proxy-ca-bundles\") pod \"controller-manager-5b9787fb89-npsks\" (UID: \"46d93868-8f7e-44c2-905d-17c8570b9213\") " pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.145021 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46d93868-8f7e-44c2-905d-17c8570b9213-serving-cert\") pod \"controller-manager-5b9787fb89-npsks\" (UID: \"46d93868-8f7e-44c2-905d-17c8570b9213\") " pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.145073 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f55fd\" (UniqueName: \"kubernetes.io/projected/46d93868-8f7e-44c2-905d-17c8570b9213-kube-api-access-f55fd\") pod \"controller-manager-5b9787fb89-npsks\" (UID: \"46d93868-8f7e-44c2-905d-17c8570b9213\") " pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.145213 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.145248 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.145263 4755 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.145279 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b87mc\" (UniqueName: \"kubernetes.io/projected/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f-kube-api-access-b87mc\") on node \"crc\" DevicePath \"\"" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 
22:37:27.245645 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f55fd\" (UniqueName: \"kubernetes.io/projected/46d93868-8f7e-44c2-905d-17c8570b9213-kube-api-access-f55fd\") pod \"controller-manager-5b9787fb89-npsks\" (UID: \"46d93868-8f7e-44c2-905d-17c8570b9213\") " pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.245771 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46d93868-8f7e-44c2-905d-17c8570b9213-config\") pod \"controller-manager-5b9787fb89-npsks\" (UID: \"46d93868-8f7e-44c2-905d-17c8570b9213\") " pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.245842 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/46d93868-8f7e-44c2-905d-17c8570b9213-client-ca\") pod \"controller-manager-5b9787fb89-npsks\" (UID: \"46d93868-8f7e-44c2-905d-17c8570b9213\") " pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.245878 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/46d93868-8f7e-44c2-905d-17c8570b9213-proxy-ca-bundles\") pod \"controller-manager-5b9787fb89-npsks\" (UID: \"46d93868-8f7e-44c2-905d-17c8570b9213\") " pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.245910 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46d93868-8f7e-44c2-905d-17c8570b9213-serving-cert\") pod \"controller-manager-5b9787fb89-npsks\" (UID: \"46d93868-8f7e-44c2-905d-17c8570b9213\") " pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.246973 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/46d93868-8f7e-44c2-905d-17c8570b9213-client-ca\") pod \"controller-manager-5b9787fb89-npsks\" (UID: \"46d93868-8f7e-44c2-905d-17c8570b9213\") " pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.247312 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/46d93868-8f7e-44c2-905d-17c8570b9213-proxy-ca-bundles\") pod \"controller-manager-5b9787fb89-npsks\" (UID: \"46d93868-8f7e-44c2-905d-17c8570b9213\") " pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.247633 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46d93868-8f7e-44c2-905d-17c8570b9213-config\") pod \"controller-manager-5b9787fb89-npsks\" (UID: \"46d93868-8f7e-44c2-905d-17c8570b9213\") " pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.250607 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46d93868-8f7e-44c2-905d-17c8570b9213-serving-cert\") pod \"controller-manager-5b9787fb89-npsks\" 
(UID: \"46d93868-8f7e-44c2-905d-17c8570b9213\") " pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.277581 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f55fd\" (UniqueName: \"kubernetes.io/projected/46d93868-8f7e-44c2-905d-17c8570b9213-kube-api-access-f55fd\") pod \"controller-manager-5b9787fb89-npsks\" (UID: \"46d93868-8f7e-44c2-905d-17c8570b9213\") " pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.398067 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.445542 4755 generic.go:334] "Generic (PLEG): container finished" podID="4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f" containerID="7eb9699c3b1f2000c8d07b6d8ec4349f55aa58fa69b61f0b0b923fdf5f125f22" exitCode=0 Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.445653 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-b4698c5-6l92t" event={"ID":"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f","Type":"ContainerDied","Data":"7eb9699c3b1f2000c8d07b6d8ec4349f55aa58fa69b61f0b0b923fdf5f125f22"} Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.445657 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-b4698c5-6l92t" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.445699 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-b4698c5-6l92t" event={"ID":"4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f","Type":"ContainerDied","Data":"a91c000dce566d837c4739262ec68f2b6c97a375ee9273691f468ed6456ccfc5"} Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.445765 4755 scope.go:117] "RemoveContainer" containerID="7eb9699c3b1f2000c8d07b6d8ec4349f55aa58fa69b61f0b0b923fdf5f125f22" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.455911 4755 generic.go:334] "Generic (PLEG): container finished" podID="95a09db3-3fad-4c40-88da-5015d8f0468b" containerID="f0b1b8b6e150648dcdf936239aa95a415c45b6c21afe9680331843d09f3f4c85" exitCode=0 Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.456034 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"95a09db3-3fad-4c40-88da-5015d8f0468b","Type":"ContainerDied","Data":"f0b1b8b6e150648dcdf936239aa95a415c45b6c21afe9680331843d09f3f4c85"} Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.482079 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-b4698c5-6l92t"] Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.484516 4755 scope.go:117] "RemoveContainer" containerID="7eb9699c3b1f2000c8d07b6d8ec4349f55aa58fa69b61f0b0b923fdf5f125f22" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.486550 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-b4698c5-6l92t"] Feb 02 22:37:27 crc kubenswrapper[4755]: E0202 22:37:27.487165 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7eb9699c3b1f2000c8d07b6d8ec4349f55aa58fa69b61f0b0b923fdf5f125f22\": container with ID starting with 
7eb9699c3b1f2000c8d07b6d8ec4349f55aa58fa69b61f0b0b923fdf5f125f22 not found: ID does not exist" containerID="7eb9699c3b1f2000c8d07b6d8ec4349f55aa58fa69b61f0b0b923fdf5f125f22" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.487212 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7eb9699c3b1f2000c8d07b6d8ec4349f55aa58fa69b61f0b0b923fdf5f125f22"} err="failed to get container status \"7eb9699c3b1f2000c8d07b6d8ec4349f55aa58fa69b61f0b0b923fdf5f125f22\": rpc error: code = NotFound desc = could not find container \"7eb9699c3b1f2000c8d07b6d8ec4349f55aa58fa69b61f0b0b923fdf5f125f22\": container with ID starting with 7eb9699c3b1f2000c8d07b6d8ec4349f55aa58fa69b61f0b0b923fdf5f125f22 not found: ID does not exist" Feb 02 22:37:27 crc kubenswrapper[4755]: I0202 22:37:27.946252 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5b9787fb89-npsks"] Feb 02 22:37:27 crc kubenswrapper[4755]: W0202 22:37:27.956874 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod46d93868_8f7e_44c2_905d_17c8570b9213.slice/crio-3f05ec48153175798c9c0a2d86e45ccf02a2e0c261d250770ebc9108acc3bab6 WatchSource:0}: Error finding container 3f05ec48153175798c9c0a2d86e45ccf02a2e0c261d250770ebc9108acc3bab6: Status 404 returned error can't find the container with id 3f05ec48153175798c9c0a2d86e45ccf02a2e0c261d250770ebc9108acc3bab6 Feb 02 22:37:28 crc kubenswrapper[4755]: I0202 22:37:28.121528 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 02 22:37:28 crc kubenswrapper[4755]: I0202 22:37:28.122629 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 02 22:37:28 crc kubenswrapper[4755]: I0202 22:37:28.138874 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 02 22:37:28 crc kubenswrapper[4755]: I0202 22:37:28.163307 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/371c3c30-c3ba-47a1-bc16-9b0a621be7b3-var-lock\") pod \"installer-9-crc\" (UID: \"371c3c30-c3ba-47a1-bc16-9b0a621be7b3\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 22:37:28 crc kubenswrapper[4755]: I0202 22:37:28.163400 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/371c3c30-c3ba-47a1-bc16-9b0a621be7b3-kube-api-access\") pod \"installer-9-crc\" (UID: \"371c3c30-c3ba-47a1-bc16-9b0a621be7b3\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 22:37:28 crc kubenswrapper[4755]: I0202 22:37:28.163459 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/371c3c30-c3ba-47a1-bc16-9b0a621be7b3-kubelet-dir\") pod \"installer-9-crc\" (UID: \"371c3c30-c3ba-47a1-bc16-9b0a621be7b3\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 22:37:28 crc kubenswrapper[4755]: I0202 22:37:28.264216 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/371c3c30-c3ba-47a1-bc16-9b0a621be7b3-kubelet-dir\") pod \"installer-9-crc\" (UID: \"371c3c30-c3ba-47a1-bc16-9b0a621be7b3\") " 
pod="openshift-kube-apiserver/installer-9-crc" Feb 02 22:37:28 crc kubenswrapper[4755]: I0202 22:37:28.264566 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/371c3c30-c3ba-47a1-bc16-9b0a621be7b3-var-lock\") pod \"installer-9-crc\" (UID: \"371c3c30-c3ba-47a1-bc16-9b0a621be7b3\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 22:37:28 crc kubenswrapper[4755]: I0202 22:37:28.264341 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/371c3c30-c3ba-47a1-bc16-9b0a621be7b3-kubelet-dir\") pod \"installer-9-crc\" (UID: \"371c3c30-c3ba-47a1-bc16-9b0a621be7b3\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 22:37:28 crc kubenswrapper[4755]: I0202 22:37:28.264674 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/371c3c30-c3ba-47a1-bc16-9b0a621be7b3-var-lock\") pod \"installer-9-crc\" (UID: \"371c3c30-c3ba-47a1-bc16-9b0a621be7b3\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 22:37:28 crc kubenswrapper[4755]: I0202 22:37:28.264608 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/371c3c30-c3ba-47a1-bc16-9b0a621be7b3-kube-api-access\") pod \"installer-9-crc\" (UID: \"371c3c30-c3ba-47a1-bc16-9b0a621be7b3\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 22:37:28 crc kubenswrapper[4755]: I0202 22:37:28.284475 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/371c3c30-c3ba-47a1-bc16-9b0a621be7b3-kube-api-access\") pod \"installer-9-crc\" (UID: \"371c3c30-c3ba-47a1-bc16-9b0a621be7b3\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 22:37:28 crc kubenswrapper[4755]: I0202 22:37:28.449810 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 02 22:37:28 crc kubenswrapper[4755]: I0202 22:37:28.463683 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" event={"ID":"46d93868-8f7e-44c2-905d-17c8570b9213","Type":"ContainerStarted","Data":"3f05ec48153175798c9c0a2d86e45ccf02a2e0c261d250770ebc9108acc3bab6"} Feb 02 22:37:29 crc kubenswrapper[4755]: I0202 22:37:29.081364 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f" path="/var/lib/kubelet/pods/4c2531c3-95a3-40e1-9c2a-9ba6e87bb76f/volumes" Feb 02 22:37:29 crc kubenswrapper[4755]: I0202 22:37:29.606469 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 02 22:37:30 crc kubenswrapper[4755]: I0202 22:37:30.476671 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" event={"ID":"46d93868-8f7e-44c2-905d-17c8570b9213","Type":"ContainerStarted","Data":"84b32e16b83377a07f4461282a080e7716fe62c21e93300666aaeee7bf977b79"} Feb 02 22:37:30 crc kubenswrapper[4755]: I0202 22:37:30.476928 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" Feb 02 22:37:30 crc kubenswrapper[4755]: I0202 22:37:30.480799 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" Feb 02 22:37:30 crc kubenswrapper[4755]: I0202 22:37:30.511331 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" podStartSLOduration=10.511316474000001 podStartE2EDuration="10.511316474s" podCreationTimestamp="2026-02-02 22:37:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:37:30.495694518 +0000 UTC m=+206.186914844" watchObservedRunningTime="2026-02-02 22:37:30.511316474 +0000 UTC m=+206.202536800" Feb 02 22:37:32 crc kubenswrapper[4755]: I0202 22:37:32.939441 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 22:37:33 crc kubenswrapper[4755]: I0202 22:37:33.030529 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/95a09db3-3fad-4c40-88da-5015d8f0468b-kubelet-dir\") pod \"95a09db3-3fad-4c40-88da-5015d8f0468b\" (UID: \"95a09db3-3fad-4c40-88da-5015d8f0468b\") " Feb 02 22:37:33 crc kubenswrapper[4755]: I0202 22:37:33.030680 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/95a09db3-3fad-4c40-88da-5015d8f0468b-kube-api-access\") pod \"95a09db3-3fad-4c40-88da-5015d8f0468b\" (UID: \"95a09db3-3fad-4c40-88da-5015d8f0468b\") " Feb 02 22:37:33 crc kubenswrapper[4755]: I0202 22:37:33.031795 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/95a09db3-3fad-4c40-88da-5015d8f0468b-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "95a09db3-3fad-4c40-88da-5015d8f0468b" (UID: "95a09db3-3fad-4c40-88da-5015d8f0468b"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:37:33 crc kubenswrapper[4755]: I0202 22:37:33.045070 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95a09db3-3fad-4c40-88da-5015d8f0468b-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "95a09db3-3fad-4c40-88da-5015d8f0468b" (UID: "95a09db3-3fad-4c40-88da-5015d8f0468b"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:37:33 crc kubenswrapper[4755]: I0202 22:37:33.132170 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/95a09db3-3fad-4c40-88da-5015d8f0468b-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 22:37:33 crc kubenswrapper[4755]: I0202 22:37:33.132261 4755 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/95a09db3-3fad-4c40-88da-5015d8f0468b-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 02 22:37:33 crc kubenswrapper[4755]: I0202 22:37:33.501968 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z4gbr" event={"ID":"26f31363-966f-44cb-8cf1-fc6b071dad2b","Type":"ContainerStarted","Data":"0c3a1e5f5253d1fdc1ec004023632b62effb676905a5117cb05c662c6720b50d"} Feb 02 22:37:33 crc kubenswrapper[4755]: I0202 22:37:33.506240 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"95a09db3-3fad-4c40-88da-5015d8f0468b","Type":"ContainerDied","Data":"13634a504d810e29f87a0680a92f747211c85ab0b04a36c19bbda182de3c54bd"} Feb 02 22:37:33 crc kubenswrapper[4755]: I0202 22:37:33.506284 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="13634a504d810e29f87a0680a92f747211c85ab0b04a36c19bbda182de3c54bd" Feb 02 22:37:33 crc kubenswrapper[4755]: I0202 22:37:33.506349 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 22:37:33 crc kubenswrapper[4755]: I0202 22:37:33.514238 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"371c3c30-c3ba-47a1-bc16-9b0a621be7b3","Type":"ContainerStarted","Data":"24b07be36096619dec532c46f99e6bcdb53ef71bfa871981bb785c6e2fa845d8"} Feb 02 22:37:34 crc kubenswrapper[4755]: I0202 22:37:34.523363 4755 generic.go:334] "Generic (PLEG): container finished" podID="2025f58f-a8e1-4009-a95b-946aca049871" containerID="7b958054b761b58b833bc12aee02d771e474703923516043a6adb0416bc780a6" exitCode=0 Feb 02 22:37:34 crc kubenswrapper[4755]: I0202 22:37:34.523430 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9k7fx" event={"ID":"2025f58f-a8e1-4009-a95b-946aca049871","Type":"ContainerDied","Data":"7b958054b761b58b833bc12aee02d771e474703923516043a6adb0416bc780a6"} Feb 02 22:37:34 crc kubenswrapper[4755]: I0202 22:37:34.526584 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"371c3c30-c3ba-47a1-bc16-9b0a621be7b3","Type":"ContainerStarted","Data":"4b768d5da3c4176d1e11cad616eb628c4010e0029d971d595d4840bd5685ea43"} Feb 02 22:37:34 crc kubenswrapper[4755]: I0202 22:37:34.531763 4755 generic.go:334] "Generic (PLEG): container finished" podID="26f31363-966f-44cb-8cf1-fc6b071dad2b" containerID="0c3a1e5f5253d1fdc1ec004023632b62effb676905a5117cb05c662c6720b50d" exitCode=0 Feb 02 22:37:34 crc kubenswrapper[4755]: I0202 22:37:34.531826 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z4gbr" event={"ID":"26f31363-966f-44cb-8cf1-fc6b071dad2b","Type":"ContainerDied","Data":"0c3a1e5f5253d1fdc1ec004023632b62effb676905a5117cb05c662c6720b50d"} Feb 02 22:37:34 crc kubenswrapper[4755]: I0202 22:37:34.570602 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=6.57058161 podStartE2EDuration="6.57058161s" podCreationTimestamp="2026-02-02 22:37:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:37:34.570072775 +0000 UTC m=+210.261293141" watchObservedRunningTime="2026-02-02 22:37:34.57058161 +0000 UTC m=+210.261801936" Feb 02 22:37:35 crc kubenswrapper[4755]: I0202 22:37:35.539963 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-78h9v" event={"ID":"8b90fc36-b80c-4011-926b-b1579c7d0ada","Type":"ContainerStarted","Data":"685fc9cf90f0bb87af8362b57de42861288e1390b96129cd0b7f4b63a208748a"} Feb 02 22:37:35 crc kubenswrapper[4755]: I0202 22:37:35.543478 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z4gbr" event={"ID":"26f31363-966f-44cb-8cf1-fc6b071dad2b","Type":"ContainerStarted","Data":"72388ffe20cf4deceabe7a126efec53dd5cd2f98a68cf04a1bed86ffb5035238"} Feb 02 22:37:35 crc kubenswrapper[4755]: I0202 22:37:35.545291 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g4db9" event={"ID":"378f0e3d-577f-4e51-a994-411d062c9fba","Type":"ContainerStarted","Data":"4d141543f4d2f096c49e2f69ae029b778536d934e88b6f09df0da6cd658ec22d"} Feb 02 22:37:35 crc kubenswrapper[4755]: I0202 22:37:35.547141 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-9k7fx" event={"ID":"2025f58f-a8e1-4009-a95b-946aca049871","Type":"ContainerStarted","Data":"b4e4a7206f9adfc2965df1140c3bbb7e5ad975592de4c1b15fd1907ba58d5add"} Feb 02 22:37:35 crc kubenswrapper[4755]: I0202 22:37:35.580076 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9k7fx" podStartSLOduration=2.399122448 podStartE2EDuration="52.580056032s" podCreationTimestamp="2026-02-02 22:36:43 +0000 UTC" firstStartedPulling="2026-02-02 22:36:44.970075208 +0000 UTC m=+160.661295534" lastFinishedPulling="2026-02-02 22:37:35.151008792 +0000 UTC m=+210.842229118" observedRunningTime="2026-02-02 22:37:35.576224402 +0000 UTC m=+211.267444738" watchObservedRunningTime="2026-02-02 22:37:35.580056032 +0000 UTC m=+211.271276368" Feb 02 22:37:35 crc kubenswrapper[4755]: I0202 22:37:35.609436 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-z4gbr" podStartSLOduration=2.455170228 podStartE2EDuration="52.60941732s" podCreationTimestamp="2026-02-02 22:36:43 +0000 UTC" firstStartedPulling="2026-02-02 22:36:44.984232542 +0000 UTC m=+160.675452908" lastFinishedPulling="2026-02-02 22:37:35.138479664 +0000 UTC m=+210.829700000" observedRunningTime="2026-02-02 22:37:35.605981032 +0000 UTC m=+211.297201348" watchObservedRunningTime="2026-02-02 22:37:35.60941732 +0000 UTC m=+211.300637646" Feb 02 22:37:36 crc kubenswrapper[4755]: I0202 22:37:36.216944 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-n987n" Feb 02 22:37:36 crc kubenswrapper[4755]: I0202 22:37:36.554813 4755 generic.go:334] "Generic (PLEG): container finished" podID="8b90fc36-b80c-4011-926b-b1579c7d0ada" containerID="685fc9cf90f0bb87af8362b57de42861288e1390b96129cd0b7f4b63a208748a" exitCode=0 Feb 02 22:37:36 crc kubenswrapper[4755]: I0202 22:37:36.554901 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-78h9v" event={"ID":"8b90fc36-b80c-4011-926b-b1579c7d0ada","Type":"ContainerDied","Data":"685fc9cf90f0bb87af8362b57de42861288e1390b96129cd0b7f4b63a208748a"} Feb 02 22:37:36 crc kubenswrapper[4755]: I0202 22:37:36.558430 4755 generic.go:334] "Generic (PLEG): container finished" podID="378f0e3d-577f-4e51-a994-411d062c9fba" containerID="4d141543f4d2f096c49e2f69ae029b778536d934e88b6f09df0da6cd658ec22d" exitCode=0 Feb 02 22:37:36 crc kubenswrapper[4755]: I0202 22:37:36.558452 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g4db9" event={"ID":"378f0e3d-577f-4e51-a994-411d062c9fba","Type":"ContainerDied","Data":"4d141543f4d2f096c49e2f69ae029b778536d934e88b6f09df0da6cd658ec22d"} Feb 02 22:37:37 crc kubenswrapper[4755]: E0202 22:37:37.086707 4755 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4e6bc82_8080_4a17_8c5a_7b20eaec23bc.slice/crio-conmon-455f8292f902ab1d9123d4b9b2838b96db73491a11106b1932da029b96749f05.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4e6bc82_8080_4a17_8c5a_7b20eaec23bc.slice/crio-455f8292f902ab1d9123d4b9b2838b96db73491a11106b1932da029b96749f05.scope\": RecentStats: unable to find data in memory cache]" Feb 02 22:37:37 crc kubenswrapper[4755]: I0202 22:37:37.571862 4755 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g4db9" event={"ID":"378f0e3d-577f-4e51-a994-411d062c9fba","Type":"ContainerStarted","Data":"c19fc5733662fc0a3b611cc988fcf6be578d23aa4143aeac706b1098bdf6d3cd"} Feb 02 22:37:37 crc kubenswrapper[4755]: I0202 22:37:37.573484 4755 generic.go:334] "Generic (PLEG): container finished" podID="83ede4e1-292f-40c0-8e1f-cc44190a0c92" containerID="040f52e1e9f1cf06abd6637c3f09f736e4982d0d1ba26e5505d6ae9de5f987cb" exitCode=0 Feb 02 22:37:37 crc kubenswrapper[4755]: I0202 22:37:37.573580 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-964gh" event={"ID":"83ede4e1-292f-40c0-8e1f-cc44190a0c92","Type":"ContainerDied","Data":"040f52e1e9f1cf06abd6637c3f09f736e4982d0d1ba26e5505d6ae9de5f987cb"} Feb 02 22:37:37 crc kubenswrapper[4755]: I0202 22:37:37.578316 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-78h9v" event={"ID":"8b90fc36-b80c-4011-926b-b1579c7d0ada","Type":"ContainerStarted","Data":"e7075a7eebe530e11af7e3315b70299c8c053cd4ce7f2411302d876904e5636c"} Feb 02 22:37:37 crc kubenswrapper[4755]: I0202 22:37:37.584899 4755 generic.go:334] "Generic (PLEG): container finished" podID="a4e6bc82-8080-4a17-8c5a-7b20eaec23bc" containerID="455f8292f902ab1d9123d4b9b2838b96db73491a11106b1932da029b96749f05" exitCode=0 Feb 02 22:37:37 crc kubenswrapper[4755]: I0202 22:37:37.584937 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n98p2" event={"ID":"a4e6bc82-8080-4a17-8c5a-7b20eaec23bc","Type":"ContainerDied","Data":"455f8292f902ab1d9123d4b9b2838b96db73491a11106b1932da029b96749f05"} Feb 02 22:37:37 crc kubenswrapper[4755]: I0202 22:37:37.602159 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-g4db9" podStartSLOduration=2.60928232 podStartE2EDuration="53.602141163s" podCreationTimestamp="2026-02-02 22:36:44 +0000 UTC" firstStartedPulling="2026-02-02 22:36:45.997083431 +0000 UTC m=+161.688303757" lastFinishedPulling="2026-02-02 22:37:36.989942274 +0000 UTC m=+212.681162600" observedRunningTime="2026-02-02 22:37:37.599653062 +0000 UTC m=+213.290873388" watchObservedRunningTime="2026-02-02 22:37:37.602141163 +0000 UTC m=+213.293361499" Feb 02 22:37:37 crc kubenswrapper[4755]: I0202 22:37:37.632963 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-78h9v" podStartSLOduration=2.529382317 podStartE2EDuration="54.632928702s" podCreationTimestamp="2026-02-02 22:36:43 +0000 UTC" firstStartedPulling="2026-02-02 22:36:44.976820291 +0000 UTC m=+160.668040617" lastFinishedPulling="2026-02-02 22:37:37.080366666 +0000 UTC m=+212.771587002" observedRunningTime="2026-02-02 22:37:37.62830785 +0000 UTC m=+213.319528196" watchObservedRunningTime="2026-02-02 22:37:37.632928702 +0000 UTC m=+213.324149068" Feb 02 22:37:38 crc kubenswrapper[4755]: I0202 22:37:38.591831 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n98p2" event={"ID":"a4e6bc82-8080-4a17-8c5a-7b20eaec23bc","Type":"ContainerStarted","Data":"17d28346ef002acffb09ee567460dd45a2ae3d77c5d27a6bad80718fb7e09a8c"} Feb 02 22:37:38 crc kubenswrapper[4755]: I0202 22:37:38.593415 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-964gh" 
event={"ID":"83ede4e1-292f-40c0-8e1f-cc44190a0c92","Type":"ContainerStarted","Data":"f5293d7deb4b1c941325897664b50e44b1e6016d741c3f25d47daede2b025b5c"} Feb 02 22:37:38 crc kubenswrapper[4755]: I0202 22:37:38.614553 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-n98p2" podStartSLOduration=2.636699835 podStartE2EDuration="53.614531898s" podCreationTimestamp="2026-02-02 22:36:45 +0000 UTC" firstStartedPulling="2026-02-02 22:36:47.03305182 +0000 UTC m=+162.724272146" lastFinishedPulling="2026-02-02 22:37:38.010883873 +0000 UTC m=+213.702104209" observedRunningTime="2026-02-02 22:37:38.60828928 +0000 UTC m=+214.299509616" watchObservedRunningTime="2026-02-02 22:37:38.614531898 +0000 UTC m=+214.305752234" Feb 02 22:37:38 crc kubenswrapper[4755]: I0202 22:37:38.629181 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-964gh" podStartSLOduration=2.459145927 podStartE2EDuration="56.629164966s" podCreationTimestamp="2026-02-02 22:36:42 +0000 UTC" firstStartedPulling="2026-02-02 22:36:43.922455064 +0000 UTC m=+159.613675390" lastFinishedPulling="2026-02-02 22:37:38.092474103 +0000 UTC m=+213.783694429" observedRunningTime="2026-02-02 22:37:38.625723178 +0000 UTC m=+214.316943504" watchObservedRunningTime="2026-02-02 22:37:38.629164966 +0000 UTC m=+214.320385292" Feb 02 22:37:39 crc kubenswrapper[4755]: I0202 22:37:39.600369 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qnqzm" event={"ID":"3e39e3e7-7d3c-4fe3-bc48-459aecbf1391","Type":"ContainerStarted","Data":"9e9b2d91ca2a770cb9ef990189b905dd31c95835af09f3f0b7fb4b9722bd302c"} Feb 02 22:37:40 crc kubenswrapper[4755]: I0202 22:37:40.626572 4755 generic.go:334] "Generic (PLEG): container finished" podID="3e39e3e7-7d3c-4fe3-bc48-459aecbf1391" containerID="9e9b2d91ca2a770cb9ef990189b905dd31c95835af09f3f0b7fb4b9722bd302c" exitCode=0 Feb 02 22:37:40 crc kubenswrapper[4755]: I0202 22:37:40.626660 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qnqzm" event={"ID":"3e39e3e7-7d3c-4fe3-bc48-459aecbf1391","Type":"ContainerDied","Data":"9e9b2d91ca2a770cb9ef990189b905dd31c95835af09f3f0b7fb4b9722bd302c"} Feb 02 22:37:41 crc kubenswrapper[4755]: I0202 22:37:41.633904 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qnqzm" event={"ID":"3e39e3e7-7d3c-4fe3-bc48-459aecbf1391","Type":"ContainerStarted","Data":"1bf23744d2293a6570263339ee0f7e9dc1cc0b5eeefd5923d8ee573c56f2107e"} Feb 02 22:37:41 crc kubenswrapper[4755]: I0202 22:37:41.637172 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4hn7q" event={"ID":"4e73c4d3-3b24-40d0-af22-fbf37ec4716d","Type":"ContainerStarted","Data":"f8174a16253f429291d86dfd79669a2201a09e22a9ba6533f0fbc1e237b753de"} Feb 02 22:37:41 crc kubenswrapper[4755]: I0202 22:37:41.660201 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qnqzm" podStartSLOduration=3.290112315 podStartE2EDuration="55.660183054s" podCreationTimestamp="2026-02-02 22:36:46 +0000 UTC" firstStartedPulling="2026-02-02 22:36:49.061794356 +0000 UTC m=+164.753014682" lastFinishedPulling="2026-02-02 22:37:41.431865095 +0000 UTC m=+217.123085421" observedRunningTime="2026-02-02 22:37:41.657626041 +0000 UTC m=+217.348846387" watchObservedRunningTime="2026-02-02 22:37:41.660183054 
+0000 UTC m=+217.351403380" Feb 02 22:37:42 crc kubenswrapper[4755]: I0202 22:37:42.648845 4755 generic.go:334] "Generic (PLEG): container finished" podID="4e73c4d3-3b24-40d0-af22-fbf37ec4716d" containerID="f8174a16253f429291d86dfd79669a2201a09e22a9ba6533f0fbc1e237b753de" exitCode=0 Feb 02 22:37:42 crc kubenswrapper[4755]: I0202 22:37:42.648887 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4hn7q" event={"ID":"4e73c4d3-3b24-40d0-af22-fbf37ec4716d","Type":"ContainerDied","Data":"f8174a16253f429291d86dfd79669a2201a09e22a9ba6533f0fbc1e237b753de"} Feb 02 22:37:43 crc kubenswrapper[4755]: I0202 22:37:43.248911 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-964gh" Feb 02 22:37:43 crc kubenswrapper[4755]: I0202 22:37:43.249211 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-964gh" Feb 02 22:37:43 crc kubenswrapper[4755]: I0202 22:37:43.467652 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9k7fx" Feb 02 22:37:43 crc kubenswrapper[4755]: I0202 22:37:43.467747 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9k7fx" Feb 02 22:37:43 crc kubenswrapper[4755]: I0202 22:37:43.672330 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-964gh" Feb 02 22:37:43 crc kubenswrapper[4755]: I0202 22:37:43.672429 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9k7fx" Feb 02 22:37:43 crc kubenswrapper[4755]: I0202 22:37:43.721316 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-964gh" Feb 02 22:37:43 crc kubenswrapper[4755]: I0202 22:37:43.921490 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-z4gbr" Feb 02 22:37:43 crc kubenswrapper[4755]: I0202 22:37:43.921539 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-z4gbr" Feb 02 22:37:43 crc kubenswrapper[4755]: I0202 22:37:43.938331 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-78h9v" Feb 02 22:37:43 crc kubenswrapper[4755]: I0202 22:37:43.938371 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-78h9v" Feb 02 22:37:43 crc kubenswrapper[4755]: I0202 22:37:43.971435 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-z4gbr" Feb 02 22:37:43 crc kubenswrapper[4755]: I0202 22:37:43.990928 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-78h9v" Feb 02 22:37:44 crc kubenswrapper[4755]: I0202 22:37:44.678627 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4hn7q" event={"ID":"4e73c4d3-3b24-40d0-af22-fbf37ec4716d","Type":"ContainerStarted","Data":"5deff95394140bce4d5ccd1cdd4205a7b19b855cee35d71b424737a91bd19b1f"} Feb 02 22:37:44 crc kubenswrapper[4755]: I0202 22:37:44.713621 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-4hn7q" podStartSLOduration=3.305508111 podStartE2EDuration="58.713603041s" podCreationTimestamp="2026-02-02 22:36:46 +0000 UTC" firstStartedPulling="2026-02-02 22:36:48.039096115 +0000 UTC m=+163.730316441" lastFinishedPulling="2026-02-02 22:37:43.447191045 +0000 UTC m=+219.138411371" observedRunningTime="2026-02-02 22:37:44.712888771 +0000 UTC m=+220.404109137" watchObservedRunningTime="2026-02-02 22:37:44.713603041 +0000 UTC m=+220.404823377" Feb 02 22:37:44 crc kubenswrapper[4755]: I0202 22:37:44.736386 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-z4gbr" Feb 02 22:37:44 crc kubenswrapper[4755]: I0202 22:37:44.738819 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9k7fx" Feb 02 22:37:44 crc kubenswrapper[4755]: I0202 22:37:44.739383 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-78h9v" Feb 02 22:37:45 crc kubenswrapper[4755]: I0202 22:37:45.268231 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-g4db9" Feb 02 22:37:45 crc kubenswrapper[4755]: I0202 22:37:45.268654 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-g4db9" Feb 02 22:37:45 crc kubenswrapper[4755]: I0202 22:37:45.314316 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-g4db9" Feb 02 22:37:45 crc kubenswrapper[4755]: I0202 22:37:45.673764 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-n98p2" Feb 02 22:37:45 crc kubenswrapper[4755]: I0202 22:37:45.673810 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-n98p2" Feb 02 22:37:45 crc kubenswrapper[4755]: I0202 22:37:45.720621 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-n98p2" Feb 02 22:37:45 crc kubenswrapper[4755]: I0202 22:37:45.731985 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-g4db9" Feb 02 22:37:45 crc kubenswrapper[4755]: I0202 22:37:45.768983 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-n98p2" Feb 02 22:37:45 crc kubenswrapper[4755]: I0202 22:37:45.962944 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z4gbr"] Feb 02 22:37:46 crc kubenswrapper[4755]: I0202 22:37:46.582986 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4hn7q" Feb 02 22:37:46 crc kubenswrapper[4755]: I0202 22:37:46.583034 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4hn7q" Feb 02 22:37:46 crc kubenswrapper[4755]: I0202 22:37:46.700587 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-z4gbr" podUID="26f31363-966f-44cb-8cf1-fc6b071dad2b" containerName="registry-server" containerID="cri-o://72388ffe20cf4deceabe7a126efec53dd5cd2f98a68cf04a1bed86ffb5035238" gracePeriod=2 Feb 02 22:37:46 crc kubenswrapper[4755]: I0202 
22:37:46.866248 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qnqzm" Feb 02 22:37:46 crc kubenswrapper[4755]: I0202 22:37:46.866588 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qnqzm" Feb 02 22:37:47 crc kubenswrapper[4755]: I0202 22:37:47.627075 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-4hn7q" podUID="4e73c4d3-3b24-40d0-af22-fbf37ec4716d" containerName="registry-server" probeResult="failure" output=< Feb 02 22:37:47 crc kubenswrapper[4755]: timeout: failed to connect service ":50051" within 1s Feb 02 22:37:47 crc kubenswrapper[4755]: > Feb 02 22:37:47 crc kubenswrapper[4755]: I0202 22:37:47.708056 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z4gbr" event={"ID":"26f31363-966f-44cb-8cf1-fc6b071dad2b","Type":"ContainerDied","Data":"72388ffe20cf4deceabe7a126efec53dd5cd2f98a68cf04a1bed86ffb5035238"} Feb 02 22:37:47 crc kubenswrapper[4755]: I0202 22:37:47.708009 4755 generic.go:334] "Generic (PLEG): container finished" podID="26f31363-966f-44cb-8cf1-fc6b071dad2b" containerID="72388ffe20cf4deceabe7a126efec53dd5cd2f98a68cf04a1bed86ffb5035238" exitCode=0 Feb 02 22:37:47 crc kubenswrapper[4755]: I0202 22:37:47.763556 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-78h9v"] Feb 02 22:37:47 crc kubenswrapper[4755]: I0202 22:37:47.763846 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-78h9v" podUID="8b90fc36-b80c-4011-926b-b1579c7d0ada" containerName="registry-server" containerID="cri-o://e7075a7eebe530e11af7e3315b70299c8c053cd4ce7f2411302d876904e5636c" gracePeriod=2 Feb 02 22:37:47 crc kubenswrapper[4755]: I0202 22:37:47.924014 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-qnqzm" podUID="3e39e3e7-7d3c-4fe3-bc48-459aecbf1391" containerName="registry-server" probeResult="failure" output=< Feb 02 22:37:47 crc kubenswrapper[4755]: timeout: failed to connect service ":50051" within 1s Feb 02 22:37:47 crc kubenswrapper[4755]: > Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.201797 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-78h9v" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.341641 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-phqf4\" (UniqueName: \"kubernetes.io/projected/8b90fc36-b80c-4011-926b-b1579c7d0ada-kube-api-access-phqf4\") pod \"8b90fc36-b80c-4011-926b-b1579c7d0ada\" (UID: \"8b90fc36-b80c-4011-926b-b1579c7d0ada\") " Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.341712 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b90fc36-b80c-4011-926b-b1579c7d0ada-catalog-content\") pod \"8b90fc36-b80c-4011-926b-b1579c7d0ada\" (UID: \"8b90fc36-b80c-4011-926b-b1579c7d0ada\") " Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.341782 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b90fc36-b80c-4011-926b-b1579c7d0ada-utilities\") pod \"8b90fc36-b80c-4011-926b-b1579c7d0ada\" (UID: \"8b90fc36-b80c-4011-926b-b1579c7d0ada\") " Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.343207 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b90fc36-b80c-4011-926b-b1579c7d0ada-utilities" (OuterVolumeSpecName: "utilities") pod "8b90fc36-b80c-4011-926b-b1579c7d0ada" (UID: "8b90fc36-b80c-4011-926b-b1579c7d0ada"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.349998 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b90fc36-b80c-4011-926b-b1579c7d0ada-kube-api-access-phqf4" (OuterVolumeSpecName: "kube-api-access-phqf4") pod "8b90fc36-b80c-4011-926b-b1579c7d0ada" (UID: "8b90fc36-b80c-4011-926b-b1579c7d0ada"). InnerVolumeSpecName "kube-api-access-phqf4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.361554 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n98p2"] Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.361832 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-n98p2" podUID="a4e6bc82-8080-4a17-8c5a-7b20eaec23bc" containerName="registry-server" containerID="cri-o://17d28346ef002acffb09ee567460dd45a2ae3d77c5d27a6bad80718fb7e09a8c" gracePeriod=2 Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.416099 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b90fc36-b80c-4011-926b-b1579c7d0ada-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8b90fc36-b80c-4011-926b-b1579c7d0ada" (UID: "8b90fc36-b80c-4011-926b-b1579c7d0ada"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.443637 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-phqf4\" (UniqueName: \"kubernetes.io/projected/8b90fc36-b80c-4011-926b-b1579c7d0ada-kube-api-access-phqf4\") on node \"crc\" DevicePath \"\"" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.443684 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b90fc36-b80c-4011-926b-b1579c7d0ada-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.443694 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b90fc36-b80c-4011-926b-b1579c7d0ada-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.510131 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z4gbr" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.646347 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26f31363-966f-44cb-8cf1-fc6b071dad2b-utilities\") pod \"26f31363-966f-44cb-8cf1-fc6b071dad2b\" (UID: \"26f31363-966f-44cb-8cf1-fc6b071dad2b\") " Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.646421 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26f31363-966f-44cb-8cf1-fc6b071dad2b-catalog-content\") pod \"26f31363-966f-44cb-8cf1-fc6b071dad2b\" (UID: \"26f31363-966f-44cb-8cf1-fc6b071dad2b\") " Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.646465 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qvwk\" (UniqueName: \"kubernetes.io/projected/26f31363-966f-44cb-8cf1-fc6b071dad2b-kube-api-access-6qvwk\") pod \"26f31363-966f-44cb-8cf1-fc6b071dad2b\" (UID: \"26f31363-966f-44cb-8cf1-fc6b071dad2b\") " Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.647034 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26f31363-966f-44cb-8cf1-fc6b071dad2b-utilities" (OuterVolumeSpecName: "utilities") pod "26f31363-966f-44cb-8cf1-fc6b071dad2b" (UID: "26f31363-966f-44cb-8cf1-fc6b071dad2b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.649438 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26f31363-966f-44cb-8cf1-fc6b071dad2b-kube-api-access-6qvwk" (OuterVolumeSpecName: "kube-api-access-6qvwk") pod "26f31363-966f-44cb-8cf1-fc6b071dad2b" (UID: "26f31363-966f-44cb-8cf1-fc6b071dad2b"). InnerVolumeSpecName "kube-api-access-6qvwk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.695973 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26f31363-966f-44cb-8cf1-fc6b071dad2b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "26f31363-966f-44cb-8cf1-fc6b071dad2b" (UID: "26f31363-966f-44cb-8cf1-fc6b071dad2b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.715313 4755 generic.go:334] "Generic (PLEG): container finished" podID="8b90fc36-b80c-4011-926b-b1579c7d0ada" containerID="e7075a7eebe530e11af7e3315b70299c8c053cd4ce7f2411302d876904e5636c" exitCode=0 Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.715380 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-78h9v" event={"ID":"8b90fc36-b80c-4011-926b-b1579c7d0ada","Type":"ContainerDied","Data":"e7075a7eebe530e11af7e3315b70299c8c053cd4ce7f2411302d876904e5636c"} Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.715378 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-78h9v" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.715418 4755 scope.go:117] "RemoveContainer" containerID="e7075a7eebe530e11af7e3315b70299c8c053cd4ce7f2411302d876904e5636c" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.715408 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-78h9v" event={"ID":"8b90fc36-b80c-4011-926b-b1579c7d0ada","Type":"ContainerDied","Data":"3cc194ab3c1e247e1db6d3500c8b3d151069ce1da8f52bd365db339b91bc59f0"} Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.717918 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z4gbr" event={"ID":"26f31363-966f-44cb-8cf1-fc6b071dad2b","Type":"ContainerDied","Data":"99ea86494fcbd867e623ca51d99dd9a20974174ef13bb9e716db0e3a4fbf8ed8"} Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.717948 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z4gbr" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.736178 4755 scope.go:117] "RemoveContainer" containerID="685fc9cf90f0bb87af8362b57de42861288e1390b96129cd0b7f4b63a208748a" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.744880 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-78h9v"] Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.748075 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26f31363-966f-44cb-8cf1-fc6b071dad2b-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.748094 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26f31363-966f-44cb-8cf1-fc6b071dad2b-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.748104 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qvwk\" (UniqueName: \"kubernetes.io/projected/26f31363-966f-44cb-8cf1-fc6b071dad2b-kube-api-access-6qvwk\") on node \"crc\" DevicePath \"\"" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.750821 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-78h9v"] Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.757288 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z4gbr"] Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.765187 4755 scope.go:117] "RemoveContainer" containerID="ac59bda1ebc7ad3154c1a9781f4674ef4a22fd38b94e0b88636ce5bb85ffc547" Feb 02 22:37:48 
crc kubenswrapper[4755]: I0202 22:37:48.768238 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-z4gbr"] Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.781157 4755 scope.go:117] "RemoveContainer" containerID="e7075a7eebe530e11af7e3315b70299c8c053cd4ce7f2411302d876904e5636c" Feb 02 22:37:48 crc kubenswrapper[4755]: E0202 22:37:48.781651 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7075a7eebe530e11af7e3315b70299c8c053cd4ce7f2411302d876904e5636c\": container with ID starting with e7075a7eebe530e11af7e3315b70299c8c053cd4ce7f2411302d876904e5636c not found: ID does not exist" containerID="e7075a7eebe530e11af7e3315b70299c8c053cd4ce7f2411302d876904e5636c" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.781696 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7075a7eebe530e11af7e3315b70299c8c053cd4ce7f2411302d876904e5636c"} err="failed to get container status \"e7075a7eebe530e11af7e3315b70299c8c053cd4ce7f2411302d876904e5636c\": rpc error: code = NotFound desc = could not find container \"e7075a7eebe530e11af7e3315b70299c8c053cd4ce7f2411302d876904e5636c\": container with ID starting with e7075a7eebe530e11af7e3315b70299c8c053cd4ce7f2411302d876904e5636c not found: ID does not exist" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.781749 4755 scope.go:117] "RemoveContainer" containerID="685fc9cf90f0bb87af8362b57de42861288e1390b96129cd0b7f4b63a208748a" Feb 02 22:37:48 crc kubenswrapper[4755]: E0202 22:37:48.782117 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"685fc9cf90f0bb87af8362b57de42861288e1390b96129cd0b7f4b63a208748a\": container with ID starting with 685fc9cf90f0bb87af8362b57de42861288e1390b96129cd0b7f4b63a208748a not found: ID does not exist" containerID="685fc9cf90f0bb87af8362b57de42861288e1390b96129cd0b7f4b63a208748a" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.782154 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"685fc9cf90f0bb87af8362b57de42861288e1390b96129cd0b7f4b63a208748a"} err="failed to get container status \"685fc9cf90f0bb87af8362b57de42861288e1390b96129cd0b7f4b63a208748a\": rpc error: code = NotFound desc = could not find container \"685fc9cf90f0bb87af8362b57de42861288e1390b96129cd0b7f4b63a208748a\": container with ID starting with 685fc9cf90f0bb87af8362b57de42861288e1390b96129cd0b7f4b63a208748a not found: ID does not exist" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.782179 4755 scope.go:117] "RemoveContainer" containerID="ac59bda1ebc7ad3154c1a9781f4674ef4a22fd38b94e0b88636ce5bb85ffc547" Feb 02 22:37:48 crc kubenswrapper[4755]: E0202 22:37:48.782598 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac59bda1ebc7ad3154c1a9781f4674ef4a22fd38b94e0b88636ce5bb85ffc547\": container with ID starting with ac59bda1ebc7ad3154c1a9781f4674ef4a22fd38b94e0b88636ce5bb85ffc547 not found: ID does not exist" containerID="ac59bda1ebc7ad3154c1a9781f4674ef4a22fd38b94e0b88636ce5bb85ffc547" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.782635 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac59bda1ebc7ad3154c1a9781f4674ef4a22fd38b94e0b88636ce5bb85ffc547"} err="failed to get container status 
\"ac59bda1ebc7ad3154c1a9781f4674ef4a22fd38b94e0b88636ce5bb85ffc547\": rpc error: code = NotFound desc = could not find container \"ac59bda1ebc7ad3154c1a9781f4674ef4a22fd38b94e0b88636ce5bb85ffc547\": container with ID starting with ac59bda1ebc7ad3154c1a9781f4674ef4a22fd38b94e0b88636ce5bb85ffc547 not found: ID does not exist" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.782654 4755 scope.go:117] "RemoveContainer" containerID="72388ffe20cf4deceabe7a126efec53dd5cd2f98a68cf04a1bed86ffb5035238" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.797066 4755 scope.go:117] "RemoveContainer" containerID="0c3a1e5f5253d1fdc1ec004023632b62effb676905a5117cb05c662c6720b50d" Feb 02 22:37:48 crc kubenswrapper[4755]: I0202 22:37:48.813086 4755 scope.go:117] "RemoveContainer" containerID="bcb492a25ccaadd0d6192df1865d437a68568c43949f0112c9398a4820d020d6" Feb 02 22:37:49 crc kubenswrapper[4755]: I0202 22:37:49.092501 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26f31363-966f-44cb-8cf1-fc6b071dad2b" path="/var/lib/kubelet/pods/26f31363-966f-44cb-8cf1-fc6b071dad2b/volumes" Feb 02 22:37:49 crc kubenswrapper[4755]: I0202 22:37:49.094667 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b90fc36-b80c-4011-926b-b1579c7d0ada" path="/var/lib/kubelet/pods/8b90fc36-b80c-4011-926b-b1579c7d0ada/volumes" Feb 02 22:37:49 crc kubenswrapper[4755]: I0202 22:37:49.743555 4755 generic.go:334] "Generic (PLEG): container finished" podID="a4e6bc82-8080-4a17-8c5a-7b20eaec23bc" containerID="17d28346ef002acffb09ee567460dd45a2ae3d77c5d27a6bad80718fb7e09a8c" exitCode=0 Feb 02 22:37:49 crc kubenswrapper[4755]: I0202 22:37:49.743918 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n98p2" event={"ID":"a4e6bc82-8080-4a17-8c5a-7b20eaec23bc","Type":"ContainerDied","Data":"17d28346ef002acffb09ee567460dd45a2ae3d77c5d27a6bad80718fb7e09a8c"} Feb 02 22:37:49 crc kubenswrapper[4755]: I0202 22:37:49.982700 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n98p2" Feb 02 22:37:50 crc kubenswrapper[4755]: I0202 22:37:50.165487 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kgb26\" (UniqueName: \"kubernetes.io/projected/a4e6bc82-8080-4a17-8c5a-7b20eaec23bc-kube-api-access-kgb26\") pod \"a4e6bc82-8080-4a17-8c5a-7b20eaec23bc\" (UID: \"a4e6bc82-8080-4a17-8c5a-7b20eaec23bc\") " Feb 02 22:37:50 crc kubenswrapper[4755]: I0202 22:37:50.166487 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4e6bc82-8080-4a17-8c5a-7b20eaec23bc-catalog-content\") pod \"a4e6bc82-8080-4a17-8c5a-7b20eaec23bc\" (UID: \"a4e6bc82-8080-4a17-8c5a-7b20eaec23bc\") " Feb 02 22:37:50 crc kubenswrapper[4755]: I0202 22:37:50.166614 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4e6bc82-8080-4a17-8c5a-7b20eaec23bc-utilities\") pod \"a4e6bc82-8080-4a17-8c5a-7b20eaec23bc\" (UID: \"a4e6bc82-8080-4a17-8c5a-7b20eaec23bc\") " Feb 02 22:37:50 crc kubenswrapper[4755]: I0202 22:37:50.167379 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4e6bc82-8080-4a17-8c5a-7b20eaec23bc-utilities" (OuterVolumeSpecName: "utilities") pod "a4e6bc82-8080-4a17-8c5a-7b20eaec23bc" (UID: "a4e6bc82-8080-4a17-8c5a-7b20eaec23bc"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:37:50 crc kubenswrapper[4755]: I0202 22:37:50.177322 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4e6bc82-8080-4a17-8c5a-7b20eaec23bc-kube-api-access-kgb26" (OuterVolumeSpecName: "kube-api-access-kgb26") pod "a4e6bc82-8080-4a17-8c5a-7b20eaec23bc" (UID: "a4e6bc82-8080-4a17-8c5a-7b20eaec23bc"). InnerVolumeSpecName "kube-api-access-kgb26". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:37:50 crc kubenswrapper[4755]: I0202 22:37:50.195287 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4e6bc82-8080-4a17-8c5a-7b20eaec23bc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a4e6bc82-8080-4a17-8c5a-7b20eaec23bc" (UID: "a4e6bc82-8080-4a17-8c5a-7b20eaec23bc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:37:50 crc kubenswrapper[4755]: I0202 22:37:50.267900 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4e6bc82-8080-4a17-8c5a-7b20eaec23bc-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 22:37:50 crc kubenswrapper[4755]: I0202 22:37:50.267939 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4e6bc82-8080-4a17-8c5a-7b20eaec23bc-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 22:37:50 crc kubenswrapper[4755]: I0202 22:37:50.267949 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kgb26\" (UniqueName: \"kubernetes.io/projected/a4e6bc82-8080-4a17-8c5a-7b20eaec23bc-kube-api-access-kgb26\") on node \"crc\" DevicePath \"\"" Feb 02 22:37:50 crc kubenswrapper[4755]: I0202 22:37:50.753288 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n98p2" event={"ID":"a4e6bc82-8080-4a17-8c5a-7b20eaec23bc","Type":"ContainerDied","Data":"0a49b33725e514ed39d67e098b79f507ed3d78429f6463b1e642db0623f84892"} Feb 02 22:37:50 crc kubenswrapper[4755]: I0202 22:37:50.753376 4755 scope.go:117] "RemoveContainer" containerID="17d28346ef002acffb09ee567460dd45a2ae3d77c5d27a6bad80718fb7e09a8c" Feb 02 22:37:50 crc kubenswrapper[4755]: I0202 22:37:50.753382 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n98p2" Feb 02 22:37:50 crc kubenswrapper[4755]: I0202 22:37:50.769962 4755 scope.go:117] "RemoveContainer" containerID="455f8292f902ab1d9123d4b9b2838b96db73491a11106b1932da029b96749f05" Feb 02 22:37:50 crc kubenswrapper[4755]: I0202 22:37:50.798141 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n98p2"] Feb 02 22:37:50 crc kubenswrapper[4755]: I0202 22:37:50.802633 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-n98p2"] Feb 02 22:37:50 crc kubenswrapper[4755]: I0202 22:37:50.811707 4755 scope.go:117] "RemoveContainer" containerID="abdd1c5c0420a986a1851f6beb6f16591b07bb0c6a67d9493e93c3352ed7a554" Feb 02 22:37:51 crc kubenswrapper[4755]: I0202 22:37:51.076570 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4e6bc82-8080-4a17-8c5a-7b20eaec23bc" path="/var/lib/kubelet/pods/a4e6bc82-8080-4a17-8c5a-7b20eaec23bc/volumes" Feb 02 22:37:53 crc kubenswrapper[4755]: I0202 22:37:53.389670 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 22:37:53 crc kubenswrapper[4755]: I0202 22:37:53.390031 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 22:37:53 crc kubenswrapper[4755]: I0202 22:37:53.390083 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" Feb 02 22:37:53 crc kubenswrapper[4755]: I0202 22:37:53.390650 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487"} pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 22:37:53 crc kubenswrapper[4755]: I0202 22:37:53.390705 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" containerID="cri-o://b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487" gracePeriod=600 Feb 02 22:37:53 crc kubenswrapper[4755]: I0202 22:37:53.778387 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerDied","Data":"b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487"} Feb 02 22:37:53 crc kubenswrapper[4755]: I0202 22:37:53.778093 4755 generic.go:334] "Generic (PLEG): container finished" podID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerID="b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487" exitCode=0 Feb 02 22:37:54 crc kubenswrapper[4755]: I0202 22:37:54.787101 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerStarted","Data":"f71cdcc7a983e505ca4c9dbfa22fdcc5349a3f06e9d60bb048ae1ae60366e471"} Feb 02 22:37:55 crc kubenswrapper[4755]: I0202 22:37:55.704879 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-5nv2v"] Feb 02 22:37:56 crc kubenswrapper[4755]: I0202 22:37:56.620865 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4hn7q" Feb 02 22:37:56 crc kubenswrapper[4755]: I0202 22:37:56.660801 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4hn7q" Feb 02 22:37:56 crc kubenswrapper[4755]: I0202 22:37:56.905693 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qnqzm" Feb 02 22:37:56 crc kubenswrapper[4755]: I0202 22:37:56.952416 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qnqzm" Feb 02 22:37:58 crc kubenswrapper[4755]: I0202 22:37:58.363576 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qnqzm"] Feb 02 22:37:58 crc kubenswrapper[4755]: I0202 22:37:58.809092 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qnqzm" podUID="3e39e3e7-7d3c-4fe3-bc48-459aecbf1391" containerName="registry-server" containerID="cri-o://1bf23744d2293a6570263339ee0f7e9dc1cc0b5eeefd5923d8ee573c56f2107e" gracePeriod=2 Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.277068 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qnqzm" Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.388750 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zjrn8\" (UniqueName: \"kubernetes.io/projected/3e39e3e7-7d3c-4fe3-bc48-459aecbf1391-kube-api-access-zjrn8\") pod \"3e39e3e7-7d3c-4fe3-bc48-459aecbf1391\" (UID: \"3e39e3e7-7d3c-4fe3-bc48-459aecbf1391\") " Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.388806 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e39e3e7-7d3c-4fe3-bc48-459aecbf1391-utilities\") pod \"3e39e3e7-7d3c-4fe3-bc48-459aecbf1391\" (UID: \"3e39e3e7-7d3c-4fe3-bc48-459aecbf1391\") " Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.388860 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e39e3e7-7d3c-4fe3-bc48-459aecbf1391-catalog-content\") pod \"3e39e3e7-7d3c-4fe3-bc48-459aecbf1391\" (UID: \"3e39e3e7-7d3c-4fe3-bc48-459aecbf1391\") " Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.390384 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e39e3e7-7d3c-4fe3-bc48-459aecbf1391-utilities" (OuterVolumeSpecName: "utilities") pod "3e39e3e7-7d3c-4fe3-bc48-459aecbf1391" (UID: "3e39e3e7-7d3c-4fe3-bc48-459aecbf1391"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.396916 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e39e3e7-7d3c-4fe3-bc48-459aecbf1391-kube-api-access-zjrn8" (OuterVolumeSpecName: "kube-api-access-zjrn8") pod "3e39e3e7-7d3c-4fe3-bc48-459aecbf1391" (UID: "3e39e3e7-7d3c-4fe3-bc48-459aecbf1391"). InnerVolumeSpecName "kube-api-access-zjrn8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.490852 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zjrn8\" (UniqueName: \"kubernetes.io/projected/3e39e3e7-7d3c-4fe3-bc48-459aecbf1391-kube-api-access-zjrn8\") on node \"crc\" DevicePath \"\"" Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.490884 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e39e3e7-7d3c-4fe3-bc48-459aecbf1391-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.513315 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e39e3e7-7d3c-4fe3-bc48-459aecbf1391-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3e39e3e7-7d3c-4fe3-bc48-459aecbf1391" (UID: "3e39e3e7-7d3c-4fe3-bc48-459aecbf1391"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.591996 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e39e3e7-7d3c-4fe3-bc48-459aecbf1391-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.817654 4755 generic.go:334] "Generic (PLEG): container finished" podID="3e39e3e7-7d3c-4fe3-bc48-459aecbf1391" containerID="1bf23744d2293a6570263339ee0f7e9dc1cc0b5eeefd5923d8ee573c56f2107e" exitCode=0 Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.817702 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qnqzm" event={"ID":"3e39e3e7-7d3c-4fe3-bc48-459aecbf1391","Type":"ContainerDied","Data":"1bf23744d2293a6570263339ee0f7e9dc1cc0b5eeefd5923d8ee573c56f2107e"} Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.817751 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qnqzm" event={"ID":"3e39e3e7-7d3c-4fe3-bc48-459aecbf1391","Type":"ContainerDied","Data":"e9debae8c766159b56e58030189378511c77617fc38c5c986733b8548d658f90"} Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.817780 4755 scope.go:117] "RemoveContainer" containerID="1bf23744d2293a6570263339ee0f7e9dc1cc0b5eeefd5923d8ee573c56f2107e" Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.817804 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qnqzm" Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.858936 4755 scope.go:117] "RemoveContainer" containerID="9e9b2d91ca2a770cb9ef990189b905dd31c95835af09f3f0b7fb4b9722bd302c" Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.890578 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qnqzm"] Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.895166 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qnqzm"] Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.904766 4755 scope.go:117] "RemoveContainer" containerID="691eb14585777af7925dadae1d5316a3841cd068112ba72fa2fc5e21a45ef3e8" Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.928236 4755 scope.go:117] "RemoveContainer" containerID="1bf23744d2293a6570263339ee0f7e9dc1cc0b5eeefd5923d8ee573c56f2107e" Feb 02 22:37:59 crc kubenswrapper[4755]: E0202 22:37:59.928676 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bf23744d2293a6570263339ee0f7e9dc1cc0b5eeefd5923d8ee573c56f2107e\": container with ID starting with 1bf23744d2293a6570263339ee0f7e9dc1cc0b5eeefd5923d8ee573c56f2107e not found: ID does not exist" containerID="1bf23744d2293a6570263339ee0f7e9dc1cc0b5eeefd5923d8ee573c56f2107e" Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.928744 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bf23744d2293a6570263339ee0f7e9dc1cc0b5eeefd5923d8ee573c56f2107e"} err="failed to get container status \"1bf23744d2293a6570263339ee0f7e9dc1cc0b5eeefd5923d8ee573c56f2107e\": rpc error: code = NotFound desc = could not find container \"1bf23744d2293a6570263339ee0f7e9dc1cc0b5eeefd5923d8ee573c56f2107e\": container with ID starting with 1bf23744d2293a6570263339ee0f7e9dc1cc0b5eeefd5923d8ee573c56f2107e not found: ID does not exist" Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.928777 4755 scope.go:117] "RemoveContainer" containerID="9e9b2d91ca2a770cb9ef990189b905dd31c95835af09f3f0b7fb4b9722bd302c" Feb 02 22:37:59 crc kubenswrapper[4755]: E0202 22:37:59.929291 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e9b2d91ca2a770cb9ef990189b905dd31c95835af09f3f0b7fb4b9722bd302c\": container with ID starting with 9e9b2d91ca2a770cb9ef990189b905dd31c95835af09f3f0b7fb4b9722bd302c not found: ID does not exist" containerID="9e9b2d91ca2a770cb9ef990189b905dd31c95835af09f3f0b7fb4b9722bd302c" Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.929327 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e9b2d91ca2a770cb9ef990189b905dd31c95835af09f3f0b7fb4b9722bd302c"} err="failed to get container status \"9e9b2d91ca2a770cb9ef990189b905dd31c95835af09f3f0b7fb4b9722bd302c\": rpc error: code = NotFound desc = could not find container \"9e9b2d91ca2a770cb9ef990189b905dd31c95835af09f3f0b7fb4b9722bd302c\": container with ID starting with 9e9b2d91ca2a770cb9ef990189b905dd31c95835af09f3f0b7fb4b9722bd302c not found: ID does not exist" Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.929354 4755 scope.go:117] "RemoveContainer" containerID="691eb14585777af7925dadae1d5316a3841cd068112ba72fa2fc5e21a45ef3e8" Feb 02 22:37:59 crc kubenswrapper[4755]: E0202 22:37:59.929657 4755 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"691eb14585777af7925dadae1d5316a3841cd068112ba72fa2fc5e21a45ef3e8\": container with ID starting with 691eb14585777af7925dadae1d5316a3841cd068112ba72fa2fc5e21a45ef3e8 not found: ID does not exist" containerID="691eb14585777af7925dadae1d5316a3841cd068112ba72fa2fc5e21a45ef3e8" Feb 02 22:37:59 crc kubenswrapper[4755]: I0202 22:37:59.929783 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"691eb14585777af7925dadae1d5316a3841cd068112ba72fa2fc5e21a45ef3e8"} err="failed to get container status \"691eb14585777af7925dadae1d5316a3841cd068112ba72fa2fc5e21a45ef3e8\": rpc error: code = NotFound desc = could not find container \"691eb14585777af7925dadae1d5316a3841cd068112ba72fa2fc5e21a45ef3e8\": container with ID starting with 691eb14585777af7925dadae1d5316a3841cd068112ba72fa2fc5e21a45ef3e8 not found: ID does not exist" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.029947 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5b9787fb89-npsks"] Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.030199 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" podUID="46d93868-8f7e-44c2-905d-17c8570b9213" containerName="controller-manager" containerID="cri-o://84b32e16b83377a07f4461282a080e7716fe62c21e93300666aaeee7bf977b79" gracePeriod=30 Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.125711 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67"] Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.126061 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" podUID="cf88b16a-a2b9-4647-8187-af9d05a1a872" containerName="route-controller-manager" containerID="cri-o://1b5b30cbde928c548a79b95033bb81e8c91565fc6452a0db28ea2f407ec990cf" gracePeriod=30 Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.646062 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.653725 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.714584 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/46d93868-8f7e-44c2-905d-17c8570b9213-proxy-ca-bundles\") pod \"46d93868-8f7e-44c2-905d-17c8570b9213\" (UID: \"46d93868-8f7e-44c2-905d-17c8570b9213\") " Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.714657 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf88b16a-a2b9-4647-8187-af9d05a1a872-serving-cert\") pod \"cf88b16a-a2b9-4647-8187-af9d05a1a872\" (UID: \"cf88b16a-a2b9-4647-8187-af9d05a1a872\") " Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.714693 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cf88b16a-a2b9-4647-8187-af9d05a1a872-client-ca\") pod \"cf88b16a-a2b9-4647-8187-af9d05a1a872\" (UID: \"cf88b16a-a2b9-4647-8187-af9d05a1a872\") " Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.714774 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf88b16a-a2b9-4647-8187-af9d05a1a872-config\") pod \"cf88b16a-a2b9-4647-8187-af9d05a1a872\" (UID: \"cf88b16a-a2b9-4647-8187-af9d05a1a872\") " Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.714812 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x5jzt\" (UniqueName: \"kubernetes.io/projected/cf88b16a-a2b9-4647-8187-af9d05a1a872-kube-api-access-x5jzt\") pod \"cf88b16a-a2b9-4647-8187-af9d05a1a872\" (UID: \"cf88b16a-a2b9-4647-8187-af9d05a1a872\") " Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.714839 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46d93868-8f7e-44c2-905d-17c8570b9213-config\") pod \"46d93868-8f7e-44c2-905d-17c8570b9213\" (UID: \"46d93868-8f7e-44c2-905d-17c8570b9213\") " Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.714864 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46d93868-8f7e-44c2-905d-17c8570b9213-serving-cert\") pod \"46d93868-8f7e-44c2-905d-17c8570b9213\" (UID: \"46d93868-8f7e-44c2-905d-17c8570b9213\") " Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.714905 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/46d93868-8f7e-44c2-905d-17c8570b9213-client-ca\") pod \"46d93868-8f7e-44c2-905d-17c8570b9213\" (UID: \"46d93868-8f7e-44c2-905d-17c8570b9213\") " Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.714937 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f55fd\" (UniqueName: \"kubernetes.io/projected/46d93868-8f7e-44c2-905d-17c8570b9213-kube-api-access-f55fd\") pod \"46d93868-8f7e-44c2-905d-17c8570b9213\" (UID: \"46d93868-8f7e-44c2-905d-17c8570b9213\") " Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.715540 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46d93868-8f7e-44c2-905d-17c8570b9213-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "46d93868-8f7e-44c2-905d-17c8570b9213" 
(UID: "46d93868-8f7e-44c2-905d-17c8570b9213"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.715602 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf88b16a-a2b9-4647-8187-af9d05a1a872-config" (OuterVolumeSpecName: "config") pod "cf88b16a-a2b9-4647-8187-af9d05a1a872" (UID: "cf88b16a-a2b9-4647-8187-af9d05a1a872"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.715592 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf88b16a-a2b9-4647-8187-af9d05a1a872-client-ca" (OuterVolumeSpecName: "client-ca") pod "cf88b16a-a2b9-4647-8187-af9d05a1a872" (UID: "cf88b16a-a2b9-4647-8187-af9d05a1a872"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.716167 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46d93868-8f7e-44c2-905d-17c8570b9213-config" (OuterVolumeSpecName: "config") pod "46d93868-8f7e-44c2-905d-17c8570b9213" (UID: "46d93868-8f7e-44c2-905d-17c8570b9213"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.716296 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46d93868-8f7e-44c2-905d-17c8570b9213-client-ca" (OuterVolumeSpecName: "client-ca") pod "46d93868-8f7e-44c2-905d-17c8570b9213" (UID: "46d93868-8f7e-44c2-905d-17c8570b9213"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.719522 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46d93868-8f7e-44c2-905d-17c8570b9213-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "46d93868-8f7e-44c2-905d-17c8570b9213" (UID: "46d93868-8f7e-44c2-905d-17c8570b9213"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.719584 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf88b16a-a2b9-4647-8187-af9d05a1a872-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "cf88b16a-a2b9-4647-8187-af9d05a1a872" (UID: "cf88b16a-a2b9-4647-8187-af9d05a1a872"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.719589 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46d93868-8f7e-44c2-905d-17c8570b9213-kube-api-access-f55fd" (OuterVolumeSpecName: "kube-api-access-f55fd") pod "46d93868-8f7e-44c2-905d-17c8570b9213" (UID: "46d93868-8f7e-44c2-905d-17c8570b9213"). InnerVolumeSpecName "kube-api-access-f55fd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.720478 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf88b16a-a2b9-4647-8187-af9d05a1a872-kube-api-access-x5jzt" (OuterVolumeSpecName: "kube-api-access-x5jzt") pod "cf88b16a-a2b9-4647-8187-af9d05a1a872" (UID: "cf88b16a-a2b9-4647-8187-af9d05a1a872"). 
InnerVolumeSpecName "kube-api-access-x5jzt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.816086 4755 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/46d93868-8f7e-44c2-905d-17c8570b9213-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.816121 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f55fd\" (UniqueName: \"kubernetes.io/projected/46d93868-8f7e-44c2-905d-17c8570b9213-kube-api-access-f55fd\") on node \"crc\" DevicePath \"\"" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.816136 4755 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/46d93868-8f7e-44c2-905d-17c8570b9213-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.816149 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf88b16a-a2b9-4647-8187-af9d05a1a872-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.816161 4755 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cf88b16a-a2b9-4647-8187-af9d05a1a872-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.816173 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf88b16a-a2b9-4647-8187-af9d05a1a872-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.816185 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x5jzt\" (UniqueName: \"kubernetes.io/projected/cf88b16a-a2b9-4647-8187-af9d05a1a872-kube-api-access-x5jzt\") on node \"crc\" DevicePath \"\"" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.816197 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46d93868-8f7e-44c2-905d-17c8570b9213-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.816209 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46d93868-8f7e-44c2-905d-17c8570b9213-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.826545 4755 generic.go:334] "Generic (PLEG): container finished" podID="cf88b16a-a2b9-4647-8187-af9d05a1a872" containerID="1b5b30cbde928c548a79b95033bb81e8c91565fc6452a0db28ea2f407ec990cf" exitCode=0 Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.826627 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" event={"ID":"cf88b16a-a2b9-4647-8187-af9d05a1a872","Type":"ContainerDied","Data":"1b5b30cbde928c548a79b95033bb81e8c91565fc6452a0db28ea2f407ec990cf"} Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.826653 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.826676 4755 scope.go:117] "RemoveContainer" containerID="1b5b30cbde928c548a79b95033bb81e8c91565fc6452a0db28ea2f407ec990cf" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.826663 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67" event={"ID":"cf88b16a-a2b9-4647-8187-af9d05a1a872","Type":"ContainerDied","Data":"9f3d3e0f9aade1078b014b5e75a84489a799d48b68553def8ed9d026d3533239"} Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.828430 4755 generic.go:334] "Generic (PLEG): container finished" podID="46d93868-8f7e-44c2-905d-17c8570b9213" containerID="84b32e16b83377a07f4461282a080e7716fe62c21e93300666aaeee7bf977b79" exitCode=0 Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.828502 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.828511 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" event={"ID":"46d93868-8f7e-44c2-905d-17c8570b9213","Type":"ContainerDied","Data":"84b32e16b83377a07f4461282a080e7716fe62c21e93300666aaeee7bf977b79"} Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.828616 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5b9787fb89-npsks" event={"ID":"46d93868-8f7e-44c2-905d-17c8570b9213","Type":"ContainerDied","Data":"3f05ec48153175798c9c0a2d86e45ccf02a2e0c261d250770ebc9108acc3bab6"} Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.846682 4755 scope.go:117] "RemoveContainer" containerID="1b5b30cbde928c548a79b95033bb81e8c91565fc6452a0db28ea2f407ec990cf" Feb 02 22:38:00 crc kubenswrapper[4755]: E0202 22:38:00.847053 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b5b30cbde928c548a79b95033bb81e8c91565fc6452a0db28ea2f407ec990cf\": container with ID starting with 1b5b30cbde928c548a79b95033bb81e8c91565fc6452a0db28ea2f407ec990cf not found: ID does not exist" containerID="1b5b30cbde928c548a79b95033bb81e8c91565fc6452a0db28ea2f407ec990cf" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.847093 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b5b30cbde928c548a79b95033bb81e8c91565fc6452a0db28ea2f407ec990cf"} err="failed to get container status \"1b5b30cbde928c548a79b95033bb81e8c91565fc6452a0db28ea2f407ec990cf\": rpc error: code = NotFound desc = could not find container \"1b5b30cbde928c548a79b95033bb81e8c91565fc6452a0db28ea2f407ec990cf\": container with ID starting with 1b5b30cbde928c548a79b95033bb81e8c91565fc6452a0db28ea2f407ec990cf not found: ID does not exist" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.847117 4755 scope.go:117] "RemoveContainer" containerID="84b32e16b83377a07f4461282a080e7716fe62c21e93300666aaeee7bf977b79" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.867564 4755 scope.go:117] "RemoveContainer" containerID="84b32e16b83377a07f4461282a080e7716fe62c21e93300666aaeee7bf977b79" Feb 02 22:38:00 crc kubenswrapper[4755]: E0202 22:38:00.868113 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not 
find container \"84b32e16b83377a07f4461282a080e7716fe62c21e93300666aaeee7bf977b79\": container with ID starting with 84b32e16b83377a07f4461282a080e7716fe62c21e93300666aaeee7bf977b79 not found: ID does not exist" containerID="84b32e16b83377a07f4461282a080e7716fe62c21e93300666aaeee7bf977b79" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.868149 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84b32e16b83377a07f4461282a080e7716fe62c21e93300666aaeee7bf977b79"} err="failed to get container status \"84b32e16b83377a07f4461282a080e7716fe62c21e93300666aaeee7bf977b79\": rpc error: code = NotFound desc = could not find container \"84b32e16b83377a07f4461282a080e7716fe62c21e93300666aaeee7bf977b79\": container with ID starting with 84b32e16b83377a07f4461282a080e7716fe62c21e93300666aaeee7bf977b79 not found: ID does not exist" Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.869181 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5b9787fb89-npsks"] Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.878513 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-5b9787fb89-npsks"] Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.883606 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67"] Feb 02 22:38:00 crc kubenswrapper[4755]: I0202 22:38:00.887585 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5477c7c5b-mgn67"] Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.078665 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e39e3e7-7d3c-4fe3-bc48-459aecbf1391" path="/var/lib/kubelet/pods/3e39e3e7-7d3c-4fe3-bc48-459aecbf1391/volumes" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.079279 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46d93868-8f7e-44c2-905d-17c8570b9213" path="/var/lib/kubelet/pods/46d93868-8f7e-44c2-905d-17c8570b9213/volumes" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.079744 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf88b16a-a2b9-4647-8187-af9d05a1a872" path="/var/lib/kubelet/pods/cf88b16a-a2b9-4647-8187-af9d05a1a872/volumes" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.706597 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8"] Feb 02 22:38:01 crc kubenswrapper[4755]: E0202 22:38:01.706996 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e39e3e7-7d3c-4fe3-bc48-459aecbf1391" containerName="extract-content" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.707018 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e39e3e7-7d3c-4fe3-bc48-459aecbf1391" containerName="extract-content" Feb 02 22:38:01 crc kubenswrapper[4755]: E0202 22:38:01.707035 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46d93868-8f7e-44c2-905d-17c8570b9213" containerName="controller-manager" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.707046 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="46d93868-8f7e-44c2-905d-17c8570b9213" containerName="controller-manager" Feb 02 22:38:01 crc kubenswrapper[4755]: E0202 22:38:01.707059 4755 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="26f31363-966f-44cb-8cf1-fc6b071dad2b" containerName="extract-utilities" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.707070 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="26f31363-966f-44cb-8cf1-fc6b071dad2b" containerName="extract-utilities" Feb 02 22:38:01 crc kubenswrapper[4755]: E0202 22:38:01.707082 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4e6bc82-8080-4a17-8c5a-7b20eaec23bc" containerName="registry-server" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.707093 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4e6bc82-8080-4a17-8c5a-7b20eaec23bc" containerName="registry-server" Feb 02 22:38:01 crc kubenswrapper[4755]: E0202 22:38:01.707110 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b90fc36-b80c-4011-926b-b1579c7d0ada" containerName="registry-server" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.707119 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b90fc36-b80c-4011-926b-b1579c7d0ada" containerName="registry-server" Feb 02 22:38:01 crc kubenswrapper[4755]: E0202 22:38:01.707136 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4e6bc82-8080-4a17-8c5a-7b20eaec23bc" containerName="extract-utilities" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.707147 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4e6bc82-8080-4a17-8c5a-7b20eaec23bc" containerName="extract-utilities" Feb 02 22:38:01 crc kubenswrapper[4755]: E0202 22:38:01.707163 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b90fc36-b80c-4011-926b-b1579c7d0ada" containerName="extract-utilities" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.707177 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b90fc36-b80c-4011-926b-b1579c7d0ada" containerName="extract-utilities" Feb 02 22:38:01 crc kubenswrapper[4755]: E0202 22:38:01.707188 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf88b16a-a2b9-4647-8187-af9d05a1a872" containerName="route-controller-manager" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.707198 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf88b16a-a2b9-4647-8187-af9d05a1a872" containerName="route-controller-manager" Feb 02 22:38:01 crc kubenswrapper[4755]: E0202 22:38:01.707214 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26f31363-966f-44cb-8cf1-fc6b071dad2b" containerName="extract-content" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.707224 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="26f31363-966f-44cb-8cf1-fc6b071dad2b" containerName="extract-content" Feb 02 22:38:01 crc kubenswrapper[4755]: E0202 22:38:01.707240 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26f31363-966f-44cb-8cf1-fc6b071dad2b" containerName="registry-server" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.707250 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="26f31363-966f-44cb-8cf1-fc6b071dad2b" containerName="registry-server" Feb 02 22:38:01 crc kubenswrapper[4755]: E0202 22:38:01.707266 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e39e3e7-7d3c-4fe3-bc48-459aecbf1391" containerName="registry-server" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.707277 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e39e3e7-7d3c-4fe3-bc48-459aecbf1391" containerName="registry-server" Feb 02 22:38:01 crc kubenswrapper[4755]: E0202 22:38:01.707292 4755 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95a09db3-3fad-4c40-88da-5015d8f0468b" containerName="pruner" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.707303 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="95a09db3-3fad-4c40-88da-5015d8f0468b" containerName="pruner" Feb 02 22:38:01 crc kubenswrapper[4755]: E0202 22:38:01.707317 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e39e3e7-7d3c-4fe3-bc48-459aecbf1391" containerName="extract-utilities" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.707328 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e39e3e7-7d3c-4fe3-bc48-459aecbf1391" containerName="extract-utilities" Feb 02 22:38:01 crc kubenswrapper[4755]: E0202 22:38:01.707345 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b90fc36-b80c-4011-926b-b1579c7d0ada" containerName="extract-content" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.707356 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b90fc36-b80c-4011-926b-b1579c7d0ada" containerName="extract-content" Feb 02 22:38:01 crc kubenswrapper[4755]: E0202 22:38:01.707369 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4e6bc82-8080-4a17-8c5a-7b20eaec23bc" containerName="extract-content" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.707378 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4e6bc82-8080-4a17-8c5a-7b20eaec23bc" containerName="extract-content" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.707527 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e39e3e7-7d3c-4fe3-bc48-459aecbf1391" containerName="registry-server" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.707543 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b90fc36-b80c-4011-926b-b1579c7d0ada" containerName="registry-server" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.707561 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf88b16a-a2b9-4647-8187-af9d05a1a872" containerName="route-controller-manager" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.707577 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4e6bc82-8080-4a17-8c5a-7b20eaec23bc" containerName="registry-server" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.707589 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="26f31363-966f-44cb-8cf1-fc6b071dad2b" containerName="registry-server" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.707606 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="95a09db3-3fad-4c40-88da-5015d8f0468b" containerName="pruner" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.707622 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="46d93868-8f7e-44c2-905d-17c8570b9213" containerName="controller-manager" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.708514 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.710453 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr"] Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.712050 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.713793 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.714407 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.715058 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.715765 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.716120 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.716511 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.718710 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.719223 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.719487 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.720014 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8"] Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.723912 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.725093 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.725312 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.726835 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4977977d-0e69-414d-a7b5-bd9d6586f3c9-config\") pod \"route-controller-manager-cd8f4584-7mqmr\" (UID: \"4977977d-0e69-414d-a7b5-bd9d6586f3c9\") " pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.726902 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f1962000-4669-40af-a431-51a11190c54c-serving-cert\") pod \"controller-manager-7d4946cb7c-l5jc8\" (UID: \"f1962000-4669-40af-a431-51a11190c54c\") " pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.726931 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1962000-4669-40af-a431-51a11190c54c-config\") pod \"controller-manager-7d4946cb7c-l5jc8\" (UID: \"f1962000-4669-40af-a431-51a11190c54c\") " pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.726956 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f1962000-4669-40af-a431-51a11190c54c-client-ca\") pod \"controller-manager-7d4946cb7c-l5jc8\" (UID: \"f1962000-4669-40af-a431-51a11190c54c\") " pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.726984 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f1962000-4669-40af-a431-51a11190c54c-proxy-ca-bundles\") pod \"controller-manager-7d4946cb7c-l5jc8\" (UID: \"f1962000-4669-40af-a431-51a11190c54c\") " pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.727019 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scqgd\" (UniqueName: \"kubernetes.io/projected/4977977d-0e69-414d-a7b5-bd9d6586f3c9-kube-api-access-scqgd\") pod \"route-controller-manager-cd8f4584-7mqmr\" (UID: \"4977977d-0e69-414d-a7b5-bd9d6586f3c9\") " pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.727047 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4977977d-0e69-414d-a7b5-bd9d6586f3c9-serving-cert\") pod \"route-controller-manager-cd8f4584-7mqmr\" (UID: \"4977977d-0e69-414d-a7b5-bd9d6586f3c9\") " pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.727083 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzd58\" (UniqueName: \"kubernetes.io/projected/f1962000-4669-40af-a431-51a11190c54c-kube-api-access-wzd58\") pod \"controller-manager-7d4946cb7c-l5jc8\" (UID: \"f1962000-4669-40af-a431-51a11190c54c\") " pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.727106 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4977977d-0e69-414d-a7b5-bd9d6586f3c9-client-ca\") pod \"route-controller-manager-cd8f4584-7mqmr\" (UID: \"4977977d-0e69-414d-a7b5-bd9d6586f3c9\") " pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.744837 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.749771 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr"] Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.828409 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/f1962000-4669-40af-a431-51a11190c54c-config\") pod \"controller-manager-7d4946cb7c-l5jc8\" (UID: \"f1962000-4669-40af-a431-51a11190c54c\") " pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.828484 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f1962000-4669-40af-a431-51a11190c54c-client-ca\") pod \"controller-manager-7d4946cb7c-l5jc8\" (UID: \"f1962000-4669-40af-a431-51a11190c54c\") " pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.828527 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f1962000-4669-40af-a431-51a11190c54c-proxy-ca-bundles\") pod \"controller-manager-7d4946cb7c-l5jc8\" (UID: \"f1962000-4669-40af-a431-51a11190c54c\") " pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.828579 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scqgd\" (UniqueName: \"kubernetes.io/projected/4977977d-0e69-414d-a7b5-bd9d6586f3c9-kube-api-access-scqgd\") pod \"route-controller-manager-cd8f4584-7mqmr\" (UID: \"4977977d-0e69-414d-a7b5-bd9d6586f3c9\") " pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.828664 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4977977d-0e69-414d-a7b5-bd9d6586f3c9-serving-cert\") pod \"route-controller-manager-cd8f4584-7mqmr\" (UID: \"4977977d-0e69-414d-a7b5-bd9d6586f3c9\") " pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.828771 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzd58\" (UniqueName: \"kubernetes.io/projected/f1962000-4669-40af-a431-51a11190c54c-kube-api-access-wzd58\") pod \"controller-manager-7d4946cb7c-l5jc8\" (UID: \"f1962000-4669-40af-a431-51a11190c54c\") " pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.828815 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4977977d-0e69-414d-a7b5-bd9d6586f3c9-client-ca\") pod \"route-controller-manager-cd8f4584-7mqmr\" (UID: \"4977977d-0e69-414d-a7b5-bd9d6586f3c9\") " pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.828867 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4977977d-0e69-414d-a7b5-bd9d6586f3c9-config\") pod \"route-controller-manager-cd8f4584-7mqmr\" (UID: \"4977977d-0e69-414d-a7b5-bd9d6586f3c9\") " pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.828900 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f1962000-4669-40af-a431-51a11190c54c-serving-cert\") pod \"controller-manager-7d4946cb7c-l5jc8\" 
(UID: \"f1962000-4669-40af-a431-51a11190c54c\") " pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.830101 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f1962000-4669-40af-a431-51a11190c54c-client-ca\") pod \"controller-manager-7d4946cb7c-l5jc8\" (UID: \"f1962000-4669-40af-a431-51a11190c54c\") " pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.830325 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1962000-4669-40af-a431-51a11190c54c-config\") pod \"controller-manager-7d4946cb7c-l5jc8\" (UID: \"f1962000-4669-40af-a431-51a11190c54c\") " pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.830532 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4977977d-0e69-414d-a7b5-bd9d6586f3c9-config\") pod \"route-controller-manager-cd8f4584-7mqmr\" (UID: \"4977977d-0e69-414d-a7b5-bd9d6586f3c9\") " pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.830755 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4977977d-0e69-414d-a7b5-bd9d6586f3c9-client-ca\") pod \"route-controller-manager-cd8f4584-7mqmr\" (UID: \"4977977d-0e69-414d-a7b5-bd9d6586f3c9\") " pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.831308 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f1962000-4669-40af-a431-51a11190c54c-proxy-ca-bundles\") pod \"controller-manager-7d4946cb7c-l5jc8\" (UID: \"f1962000-4669-40af-a431-51a11190c54c\") " pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.833837 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4977977d-0e69-414d-a7b5-bd9d6586f3c9-serving-cert\") pod \"route-controller-manager-cd8f4584-7mqmr\" (UID: \"4977977d-0e69-414d-a7b5-bd9d6586f3c9\") " pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.834435 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f1962000-4669-40af-a431-51a11190c54c-serving-cert\") pod \"controller-manager-7d4946cb7c-l5jc8\" (UID: \"f1962000-4669-40af-a431-51a11190c54c\") " pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.853612 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scqgd\" (UniqueName: \"kubernetes.io/projected/4977977d-0e69-414d-a7b5-bd9d6586f3c9-kube-api-access-scqgd\") pod \"route-controller-manager-cd8f4584-7mqmr\" (UID: \"4977977d-0e69-414d-a7b5-bd9d6586f3c9\") " pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" Feb 02 22:38:01 crc kubenswrapper[4755]: I0202 22:38:01.873539 4755 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-wzd58\" (UniqueName: \"kubernetes.io/projected/f1962000-4669-40af-a431-51a11190c54c-kube-api-access-wzd58\") pod \"controller-manager-7d4946cb7c-l5jc8\" (UID: \"f1962000-4669-40af-a431-51a11190c54c\") " pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" Feb 02 22:38:02 crc kubenswrapper[4755]: I0202 22:38:02.059279 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" Feb 02 22:38:02 crc kubenswrapper[4755]: I0202 22:38:02.068038 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" Feb 02 22:38:02 crc kubenswrapper[4755]: I0202 22:38:02.506277 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8"] Feb 02 22:38:02 crc kubenswrapper[4755]: W0202 22:38:02.518712 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1962000_4669_40af_a431_51a11190c54c.slice/crio-ab9c54860ae10da7bdec93de9c399516e154f1357e243872ae3b865eb72b561d WatchSource:0}: Error finding container ab9c54860ae10da7bdec93de9c399516e154f1357e243872ae3b865eb72b561d: Status 404 returned error can't find the container with id ab9c54860ae10da7bdec93de9c399516e154f1357e243872ae3b865eb72b561d Feb 02 22:38:02 crc kubenswrapper[4755]: I0202 22:38:02.610559 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr"] Feb 02 22:38:02 crc kubenswrapper[4755]: W0202 22:38:02.616508 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4977977d_0e69_414d_a7b5_bd9d6586f3c9.slice/crio-a12cffaffdeb7751e4b26ac76f022babeea8dde88a8ac81ce67de34f0f755965 WatchSource:0}: Error finding container a12cffaffdeb7751e4b26ac76f022babeea8dde88a8ac81ce67de34f0f755965: Status 404 returned error can't find the container with id a12cffaffdeb7751e4b26ac76f022babeea8dde88a8ac81ce67de34f0f755965 Feb 02 22:38:02 crc kubenswrapper[4755]: I0202 22:38:02.847144 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" event={"ID":"f1962000-4669-40af-a431-51a11190c54c","Type":"ContainerStarted","Data":"a1dd80289385c8cd3cd9bf0e0d36d0cdc0581492e4ad680f945115b56d2b6c8f"} Feb 02 22:38:02 crc kubenswrapper[4755]: I0202 22:38:02.847489 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" event={"ID":"f1962000-4669-40af-a431-51a11190c54c","Type":"ContainerStarted","Data":"ab9c54860ae10da7bdec93de9c399516e154f1357e243872ae3b865eb72b561d"} Feb 02 22:38:02 crc kubenswrapper[4755]: I0202 22:38:02.847531 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" Feb 02 22:38:02 crc kubenswrapper[4755]: I0202 22:38:02.848766 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" event={"ID":"4977977d-0e69-414d-a7b5-bd9d6586f3c9","Type":"ContainerStarted","Data":"6ca5c6dd82c3b1bc6591f791aa75ae0b410cadbc4306687b008e22b1e3e11e23"} Feb 02 22:38:02 crc kubenswrapper[4755]: I0202 22:38:02.848800 4755 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" event={"ID":"4977977d-0e69-414d-a7b5-bd9d6586f3c9","Type":"ContainerStarted","Data":"a12cffaffdeb7751e4b26ac76f022babeea8dde88a8ac81ce67de34f0f755965"} Feb 02 22:38:02 crc kubenswrapper[4755]: I0202 22:38:02.849087 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" Feb 02 22:38:02 crc kubenswrapper[4755]: I0202 22:38:02.858971 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" Feb 02 22:38:02 crc kubenswrapper[4755]: I0202 22:38:02.871378 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" podStartSLOduration=2.871357019 podStartE2EDuration="2.871357019s" podCreationTimestamp="2026-02-02 22:38:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:38:02.871290557 +0000 UTC m=+238.562510913" watchObservedRunningTime="2026-02-02 22:38:02.871357019 +0000 UTC m=+238.562577355" Feb 02 22:38:02 crc kubenswrapper[4755]: I0202 22:38:02.927032 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" podStartSLOduration=2.927014658 podStartE2EDuration="2.927014658s" podCreationTimestamp="2026-02-02 22:38:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:38:02.922985503 +0000 UTC m=+238.614205829" watchObservedRunningTime="2026-02-02 22:38:02.927014658 +0000 UTC m=+238.618234984" Feb 02 22:38:03 crc kubenswrapper[4755]: I0202 22:38:03.215951 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.101550 4755 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.103179 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.111288 4755 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.111648 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81" gracePeriod=15 Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.111700 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d" gracePeriod=15 Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.111806 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d" gracePeriod=15 Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.111792 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310" gracePeriod=15 Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.112325 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800" gracePeriod=15 Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.116002 4755 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 02 22:38:11 crc kubenswrapper[4755]: E0202 22:38:11.116441 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.116485 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 22:38:11 crc kubenswrapper[4755]: E0202 22:38:11.116514 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.116535 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 02 22:38:11 crc kubenswrapper[4755]: E0202 22:38:11.116557 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.116575 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 02 
22:38:11 crc kubenswrapper[4755]: E0202 22:38:11.116602 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.116618 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 22:38:11 crc kubenswrapper[4755]: E0202 22:38:11.116648 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.116667 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 02 22:38:11 crc kubenswrapper[4755]: E0202 22:38:11.116695 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.116712 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 22:38:11 crc kubenswrapper[4755]: E0202 22:38:11.116769 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.116788 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Feb 02 22:38:11 crc kubenswrapper[4755]: E0202 22:38:11.116817 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.116835 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.117062 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.117088 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.117119 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.117141 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.117163 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.117189 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.117907 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.158529 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.166315 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.166359 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.166394 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.166437 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.166554 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.166578 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.166607 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.166628 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.267628 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.268040 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.268083 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.268117 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.268114 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.268213 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.268216 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.268259 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.267755 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.268156 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.268238 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.268357 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.268409 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.268427 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.268481 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.268511 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.299888 4755 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 192.168.126.11:6443: connect: connection refused" start-of-body=
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.299987 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 192.168.126.11:6443: connect: connection refused"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.453080 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Feb 02 22:38:11 crc kubenswrapper[4755]: E0202 22:38:11.487424 4755 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.36:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.18908f01cef39591 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-02 22:38:11.486414225 +0000 UTC m=+247.177634561,LastTimestamp:2026-02-02 22:38:11.486414225 +0000 UTC m=+247.177634561,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.909248 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.910349 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.911043 4755 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d" exitCode=0
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.911117 4755 scope.go:117] "RemoveContainer" containerID="e6671011d43c7f6e9adc7cd89abc14d02c2931ea6a4016693d727b67e25a6641"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.911069 4755 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800" exitCode=0
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.911225 4755 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310" exitCode=0
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.911235 4755 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d" exitCode=2
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.913308 4755 generic.go:334] "Generic (PLEG): container finished" podID="371c3c30-c3ba-47a1-bc16-9b0a621be7b3" containerID="4b768d5da3c4176d1e11cad616eb628c4010e0029d971d595d4840bd5685ea43" exitCode=0
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.913335 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"371c3c30-c3ba-47a1-bc16-9b0a621be7b3","Type":"ContainerDied","Data":"4b768d5da3c4176d1e11cad616eb628c4010e0029d971d595d4840bd5685ea43"}
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.914249 4755 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.914446 4755 status_manager.go:851] "Failed to get status for pod" podUID="371c3c30-c3ba-47a1-bc16-9b0a621be7b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.914639 4755 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.915065 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"6d58dc87d118bf34b8b9de7cfb19c3ffaa0f2b6b0f45ec36c8dc0ad9487cf9be"}
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.915089 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"3ed466dcb3e69383e79ecdfe5019d8161303e39e92a335a4396d66c392465d6c"}
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.915744 4755 status_manager.go:851] "Failed to get status for pod" podUID="371c3c30-c3ba-47a1-bc16-9b0a621be7b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.915903 4755 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:11 crc kubenswrapper[4755]: I0202 22:38:11.916194 4755 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:12 crc kubenswrapper[4755]: I0202 22:38:12.928995 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.421272 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.421956 4755 status_manager.go:851] "Failed to get status for pod" podUID="371c3c30-c3ba-47a1-bc16-9b0a621be7b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.422141 4755 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.496895 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/371c3c30-c3ba-47a1-bc16-9b0a621be7b3-kube-api-access\") pod \"371c3c30-c3ba-47a1-bc16-9b0a621be7b3\" (UID: \"371c3c30-c3ba-47a1-bc16-9b0a621be7b3\") "
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.497232 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/371c3c30-c3ba-47a1-bc16-9b0a621be7b3-var-lock\") pod \"371c3c30-c3ba-47a1-bc16-9b0a621be7b3\" (UID: \"371c3c30-c3ba-47a1-bc16-9b0a621be7b3\") "
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.497270 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/371c3c30-c3ba-47a1-bc16-9b0a621be7b3-kubelet-dir\") pod \"371c3c30-c3ba-47a1-bc16-9b0a621be7b3\" (UID: \"371c3c30-c3ba-47a1-bc16-9b0a621be7b3\") "
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.497358 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/371c3c30-c3ba-47a1-bc16-9b0a621be7b3-var-lock" (OuterVolumeSpecName: "var-lock") pod "371c3c30-c3ba-47a1-bc16-9b0a621be7b3" (UID: "371c3c30-c3ba-47a1-bc16-9b0a621be7b3"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.497472 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/371c3c30-c3ba-47a1-bc16-9b0a621be7b3-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "371c3c30-c3ba-47a1-bc16-9b0a621be7b3" (UID: "371c3c30-c3ba-47a1-bc16-9b0a621be7b3"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.497612 4755 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/371c3c30-c3ba-47a1-bc16-9b0a621be7b3-var-lock\") on node \"crc\" DevicePath \"\""
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.497638 4755 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/371c3c30-c3ba-47a1-bc16-9b0a621be7b3-kubelet-dir\") on node \"crc\" DevicePath \"\""
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.503148 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.504178 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.504661 4755 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.504903 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/371c3c30-c3ba-47a1-bc16-9b0a621be7b3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "371c3c30-c3ba-47a1-bc16-9b0a621be7b3" (UID: "371c3c30-c3ba-47a1-bc16-9b0a621be7b3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.505037 4755 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.505636 4755 status_manager.go:851] "Failed to get status for pod" podUID="371c3c30-c3ba-47a1-bc16-9b0a621be7b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.599090 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.599426 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.599575 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.599216 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.599518 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.599667 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.600217 4755 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\""
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.600306 4755 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\""
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.600382 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/371c3c30-c3ba-47a1-bc16-9b0a621be7b3-kube-api-access\") on node \"crc\" DevicePath \"\""
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.600458 4755 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\""
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.942107 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"371c3c30-c3ba-47a1-bc16-9b0a621be7b3","Type":"ContainerDied","Data":"24b07be36096619dec532c46f99e6bcdb53ef71bfa871981bb785c6e2fa845d8"}
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.942191 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24b07be36096619dec532c46f99e6bcdb53ef71bfa871981bb785c6e2fa845d8"
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.942140 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.947851 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.949514 4755 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81" exitCode=0
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.949595 4755 scope.go:117] "RemoveContainer" containerID="a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d"
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.949837 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.969597 4755 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.969957 4755 status_manager.go:851] "Failed to get status for pod" podUID="371c3c30-c3ba-47a1-bc16-9b0a621be7b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.970260 4755 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.977115 4755 status_manager.go:851] "Failed to get status for pod" podUID="371c3c30-c3ba-47a1-bc16-9b0a621be7b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.977688 4755 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.978248 4755 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:13 crc kubenswrapper[4755]: I0202 22:38:13.983376 4755 scope.go:117] "RemoveContainer" containerID="ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800"
Feb 02 22:38:14 crc kubenswrapper[4755]: I0202 22:38:14.009020 4755 scope.go:117] "RemoveContainer" containerID="fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310"
Feb 02 22:38:14 crc kubenswrapper[4755]: I0202 22:38:14.032511 4755 scope.go:117] "RemoveContainer" containerID="41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d"
Feb 02 22:38:14 crc kubenswrapper[4755]: I0202 22:38:14.059040 4755 scope.go:117] "RemoveContainer" containerID="b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81"
Feb 02 22:38:14 crc kubenswrapper[4755]: I0202 22:38:14.076480 4755 scope.go:117] "RemoveContainer" containerID="9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc"
Feb 02 22:38:14 crc kubenswrapper[4755]: I0202 22:38:14.100055 4755 scope.go:117] "RemoveContainer" containerID="a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d"
Feb 02 22:38:14 crc kubenswrapper[4755]: E0202 22:38:14.100640 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d\": container with ID starting with a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d not found: ID does not exist" containerID="a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d"
Feb 02 22:38:14 crc kubenswrapper[4755]: I0202 22:38:14.100720 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d"} err="failed to get container status \"a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d\": rpc error: code = NotFound desc = could not find container \"a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d\": container with ID starting with a7bb3420509a3354a4a7e72a200db23f444b815bf44a9009214605e8f606105d not found: ID does not exist"
Feb 02 22:38:14 crc kubenswrapper[4755]: I0202 22:38:14.100840 4755 scope.go:117] "RemoveContainer" containerID="ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800"
Feb 02 22:38:14 crc kubenswrapper[4755]: E0202 22:38:14.101682 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\": container with ID starting with ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800 not found: ID does not exist" containerID="ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800"
Feb 02 22:38:14 crc kubenswrapper[4755]: I0202 22:38:14.101761 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800"} err="failed to get container status \"ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\": rpc error: code = NotFound desc = could not find container \"ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800\": container with ID starting with ebee4cee14840575f64c2f8e0f8d0068dee189dd9073109050ac192d778ad800 not found: ID does not exist"
Feb 02 22:38:14 crc kubenswrapper[4755]: I0202 22:38:14.101799 4755 scope.go:117] "RemoveContainer" containerID="fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310"
Feb 02 22:38:14 crc kubenswrapper[4755]: E0202 22:38:14.102231 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\": container with ID starting with fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310 not found: ID does not exist" containerID="fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310"
Feb 02 22:38:14 crc kubenswrapper[4755]: I0202 22:38:14.102280 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310"} err="failed to get container status \"fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\": rpc error: code = NotFound desc = could not find container \"fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310\": container with ID starting with fc4d85ebb847eecaeacee3f159c93ffe8c1e9f37f22d9bc20c28188e4dfac310 not found: ID does not exist"
Feb 02 22:38:14 crc kubenswrapper[4755]: I0202 22:38:14.102359 4755 scope.go:117] "RemoveContainer" containerID="41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d"
Feb 02 22:38:14 crc kubenswrapper[4755]: E0202 22:38:14.102829 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\": container with ID starting with 41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d not found: ID does not exist" containerID="41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d"
Feb 02 22:38:14 crc kubenswrapper[4755]: I0202 22:38:14.103093 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d"} err="failed to get container status \"41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\": rpc error: code = NotFound desc = could not find container \"41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d\": container with ID starting with 41eb34987d93594b835a8bdfb245b7b6d33ffbddd015e4c5b94e2290becc823d not found: ID does not exist"
Feb 02 22:38:14 crc kubenswrapper[4755]: I0202 22:38:14.103126 4755 scope.go:117] "RemoveContainer" containerID="b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81"
Feb 02 22:38:14 crc kubenswrapper[4755]: E0202 22:38:14.103594 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\": container with ID starting with b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81 not found: ID does not exist" containerID="b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81"
Feb 02 22:38:14 crc kubenswrapper[4755]: I0202 22:38:14.103665 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81"} err="failed to get container status \"b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\": rpc error: code = NotFound desc = could not find container \"b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81\": container with ID starting with b80f67cf3f24212742135a36f559a9bfa2ef384b2c00f82647221486848a5e81 not found: ID does not exist"
Feb 02 22:38:14 crc kubenswrapper[4755]: I0202 22:38:14.103710 4755 scope.go:117] "RemoveContainer" containerID="9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc"
Feb 02 22:38:14 crc kubenswrapper[4755]: E0202 22:38:14.104133 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\": container with ID starting with 9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc not found: ID does not exist" containerID="9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc"
Feb 02 22:38:14 crc kubenswrapper[4755]: I0202 22:38:14.104180 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc"} err="failed to get container status \"9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\": rpc error: code = NotFound desc = could not find container \"9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc\": container with ID starting with 9f5df4c168e48d1e3c440a85283dd772c31fff97d368f066b81f35f8227d1fdc not found: ID does not exist"
Feb 02 22:38:15 crc kubenswrapper[4755]: I0202 22:38:15.073790 4755 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:15 crc kubenswrapper[4755]: I0202 22:38:15.074283 4755 status_manager.go:851] "Failed to get status for pod" podUID="371c3c30-c3ba-47a1-bc16-9b0a621be7b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:15 crc kubenswrapper[4755]: I0202 22:38:15.075146 4755 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:15 crc kubenswrapper[4755]: I0202 22:38:15.083110 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes"
Feb 02 22:38:15 crc kubenswrapper[4755]: E0202 22:38:15.248691 4755 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:15 crc kubenswrapper[4755]: E0202 22:38:15.249298 4755 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:15 crc kubenswrapper[4755]: E0202 22:38:15.250137 4755 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:15 crc kubenswrapper[4755]: E0202 22:38:15.250759 4755 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:15 crc kubenswrapper[4755]: E0202 22:38:15.251518 4755 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:15 crc kubenswrapper[4755]: I0202 22:38:15.251569 4755 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease"
Feb 02 22:38:15 crc kubenswrapper[4755]: E0202 22:38:15.252017 4755 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.36:6443: connect: connection refused" interval="200ms"
Feb 02 22:38:15 crc kubenswrapper[4755]: E0202 22:38:15.385885 4755 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.36:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.18908f01cef39591 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-02 22:38:11.486414225 +0000 UTC m=+247.177634561,LastTimestamp:2026-02-02 22:38:11.486414225 +0000 UTC m=+247.177634561,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Feb 02 22:38:15 crc kubenswrapper[4755]: E0202 22:38:15.453201 4755 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.36:6443: connect: connection refused" interval="400ms"
Feb 02 22:38:15 crc kubenswrapper[4755]: E0202 22:38:15.854897 4755 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.36:6443: connect: connection refused" interval="800ms"
Feb 02 22:38:16 crc kubenswrapper[4755]: E0202 22:38:16.656939 4755 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.36:6443: connect: connection refused" interval="1.6s"
Feb 02 22:38:18 crc kubenswrapper[4755]: E0202 22:38:18.259060 4755 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.36:6443: connect: connection refused" interval="3.2s"
Feb 02 22:38:20 crc kubenswrapper[4755]: I0202 22:38:20.731182 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" podUID="4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" containerName="oauth-openshift" containerID="cri-o://ced2195515b0330332aeab79cae8f611a29220e18873ac65413cfdbfb7d57abd" gracePeriod=15
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.015163 4755 generic.go:334] "Generic (PLEG): container finished" podID="4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" containerID="ced2195515b0330332aeab79cae8f611a29220e18873ac65413cfdbfb7d57abd" exitCode=0
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.015215 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" event={"ID":"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc","Type":"ContainerDied","Data":"ced2195515b0330332aeab79cae8f611a29220e18873ac65413cfdbfb7d57abd"}
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.353338 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v"
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.354443 4755 status_manager.go:851] "Failed to get status for pod" podUID="371c3c30-c3ba-47a1-bc16-9b0a621be7b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.355123 4755 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.355806 4755 status_manager.go:851] "Failed to get status for pod" podUID="4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-5nv2v\": dial tcp 38.102.83.36:6443: connect: connection refused"
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.408403 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-ocp-branding-template\") pod \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") "
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.408539 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-cliconfig\") pod \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") "
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.408618 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-idp-0-file-data\") pod \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") "
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.408667 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-template-error\") pod \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") "
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.408702 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wd5kf\" (UniqueName: \"kubernetes.io/projected/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-kube-api-access-wd5kf\") pod \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") "
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.408762 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-audit-policies\") pod \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") "
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.408832 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-service-ca\") pod \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") "
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.408912 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-trusted-ca-bundle\") pod \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") "
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.410520 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-template-provider-selection\") pod \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") "
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.410595 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-serving-cert\") pod \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") "
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.411576 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-template-login\") pod \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") "
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.410867 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" (UID: "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.410939 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" (UID: "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.411007 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" (UID: "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.411671 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-router-certs\") pod \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") "
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.411762 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-session\") pod \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") "
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.411809 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-audit-dir\") pod \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\" (UID: \"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc\") "
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.412240 4755 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-audit-policies\") on node \"crc\" DevicePath \"\""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.412268 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.412295 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.412346 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" (UID: "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.413173 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" (UID: "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.418241 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" (UID: "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.419014 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" (UID: "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.419324 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-kube-api-access-wd5kf" (OuterVolumeSpecName: "kube-api-access-wd5kf") pod "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" (UID: "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc"). InnerVolumeSpecName "kube-api-access-wd5kf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.419431 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" (UID: "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.420024 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" (UID: "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.420918 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" (UID: "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.421534 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" (UID: "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.421976 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" (UID: "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.426400 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" (UID: "4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:38:21 crc kubenswrapper[4755]: E0202 22:38:21.460501 4755 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.36:6443: connect: connection refused" interval="6.4s"
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.513647 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.513702 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wd5kf\" (UniqueName: \"kubernetes.io/projected/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-kube-api-access-wd5kf\") on node \"crc\" DevicePath \"\""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.513723 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.513774 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.513796 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.513815 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.513833 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.513853 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-session\") on node \"crc\" DevicePath \"\""
Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.513873 4755 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-audit-dir\") on node \"crc\" DevicePath \"\""
kubenswrapper[4755]: I0202 22:38:21.513892 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Feb 02 22:38:21 crc kubenswrapper[4755]: I0202 22:38:21.513909 4755 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:38:22 crc kubenswrapper[4755]: I0202 22:38:22.027499 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" event={"ID":"4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc","Type":"ContainerDied","Data":"2942c6f2ea07561cf887ce34cad88d61a5739bd787ad4170964cf545a9e663f1"} Feb 02 22:38:22 crc kubenswrapper[4755]: I0202 22:38:22.027552 4755 scope.go:117] "RemoveContainer" containerID="ced2195515b0330332aeab79cae8f611a29220e18873ac65413cfdbfb7d57abd" Feb 02 22:38:22 crc kubenswrapper[4755]: I0202 22:38:22.027625 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" Feb 02 22:38:22 crc kubenswrapper[4755]: I0202 22:38:22.028383 4755 status_manager.go:851] "Failed to get status for pod" podUID="371c3c30-c3ba-47a1-bc16-9b0a621be7b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.36:6443: connect: connection refused" Feb 02 22:38:22 crc kubenswrapper[4755]: I0202 22:38:22.030627 4755 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.36:6443: connect: connection refused" Feb 02 22:38:22 crc kubenswrapper[4755]: I0202 22:38:22.031254 4755 status_manager.go:851] "Failed to get status for pod" podUID="4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-5nv2v\": dial tcp 38.102.83.36:6443: connect: connection refused" Feb 02 22:38:22 crc kubenswrapper[4755]: I0202 22:38:22.057490 4755 status_manager.go:851] "Failed to get status for pod" podUID="371c3c30-c3ba-47a1-bc16-9b0a621be7b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.36:6443: connect: connection refused" Feb 02 22:38:22 crc kubenswrapper[4755]: I0202 22:38:22.058349 4755 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.36:6443: connect: connection refused" Feb 02 22:38:22 crc kubenswrapper[4755]: I0202 22:38:22.060153 4755 status_manager.go:851] "Failed to get status for pod" podUID="4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" 
pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-5nv2v\": dial tcp 38.102.83.36:6443: connect: connection refused" Feb 02 22:38:23 crc kubenswrapper[4755]: I0202 22:38:23.071376 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:38:23 crc kubenswrapper[4755]: I0202 22:38:23.072472 4755 status_manager.go:851] "Failed to get status for pod" podUID="4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-5nv2v\": dial tcp 38.102.83.36:6443: connect: connection refused" Feb 02 22:38:23 crc kubenswrapper[4755]: I0202 22:38:23.072654 4755 status_manager.go:851] "Failed to get status for pod" podUID="371c3c30-c3ba-47a1-bc16-9b0a621be7b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.36:6443: connect: connection refused" Feb 02 22:38:23 crc kubenswrapper[4755]: I0202 22:38:23.072865 4755 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.36:6443: connect: connection refused" Feb 02 22:38:23 crc kubenswrapper[4755]: I0202 22:38:23.086965 4755 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="eea10ee7-ddaf-4f3c-86b1-82410e04081a" Feb 02 22:38:23 crc kubenswrapper[4755]: I0202 22:38:23.087003 4755 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="eea10ee7-ddaf-4f3c-86b1-82410e04081a" Feb 02 22:38:23 crc kubenswrapper[4755]: E0202 22:38:23.087450 4755 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.36:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:38:23 crc kubenswrapper[4755]: I0202 22:38:23.087901 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:38:23 crc kubenswrapper[4755]: W0202 22:38:23.103254 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-483ade77067f8718904a5b11e13310261563f4199d6fb3bafed8b732ec711752 WatchSource:0}: Error finding container 483ade77067f8718904a5b11e13310261563f4199d6fb3bafed8b732ec711752: Status 404 returned error can't find the container with id 483ade77067f8718904a5b11e13310261563f4199d6fb3bafed8b732ec711752 Feb 02 22:38:24 crc kubenswrapper[4755]: I0202 22:38:24.045953 4755 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="6cc0f9e73f88abcf77863a92000e7916995214e18a431b07fe14c3a660938657" exitCode=0 Feb 02 22:38:24 crc kubenswrapper[4755]: I0202 22:38:24.046211 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"6cc0f9e73f88abcf77863a92000e7916995214e18a431b07fe14c3a660938657"} Feb 02 22:38:24 crc kubenswrapper[4755]: I0202 22:38:24.046514 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"483ade77067f8718904a5b11e13310261563f4199d6fb3bafed8b732ec711752"} Feb 02 22:38:24 crc kubenswrapper[4755]: I0202 22:38:24.047165 4755 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="eea10ee7-ddaf-4f3c-86b1-82410e04081a" Feb 02 22:38:24 crc kubenswrapper[4755]: I0202 22:38:24.047205 4755 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="eea10ee7-ddaf-4f3c-86b1-82410e04081a" Feb 02 22:38:24 crc kubenswrapper[4755]: E0202 22:38:24.047881 4755 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.36:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:38:24 crc kubenswrapper[4755]: I0202 22:38:24.048214 4755 status_manager.go:851] "Failed to get status for pod" podUID="371c3c30-c3ba-47a1-bc16-9b0a621be7b3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.36:6443: connect: connection refused" Feb 02 22:38:24 crc kubenswrapper[4755]: I0202 22:38:24.048627 4755 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.36:6443: connect: connection refused" Feb 02 22:38:24 crc kubenswrapper[4755]: I0202 22:38:24.049322 4755 status_manager.go:851] "Failed to get status for pod" podUID="4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" pod="openshift-authentication/oauth-openshift-558db77b4-5nv2v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-5nv2v\": dial tcp 38.102.83.36:6443: connect: connection refused" Feb 02 22:38:25 crc kubenswrapper[4755]: I0202 22:38:25.061928 4755 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Feb 02 22:38:25 crc kubenswrapper[4755]: I0202 22:38:25.062004 4755 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="9e1be4ef7a84da3c4efc5096837fa048c01dbacc0266c47237c05e92e9c93400" exitCode=1 Feb 02 22:38:25 crc kubenswrapper[4755]: I0202 22:38:25.062113 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"9e1be4ef7a84da3c4efc5096837fa048c01dbacc0266c47237c05e92e9c93400"} Feb 02 22:38:25 crc kubenswrapper[4755]: I0202 22:38:25.062708 4755 scope.go:117] "RemoveContainer" containerID="9e1be4ef7a84da3c4efc5096837fa048c01dbacc0266c47237c05e92e9c93400" Feb 02 22:38:25 crc kubenswrapper[4755]: I0202 22:38:25.076597 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"0077d29e1401d5bf171e7f2a2391035bc87143be29dca28db0df715d40b38d48"} Feb 02 22:38:25 crc kubenswrapper[4755]: I0202 22:38:25.076628 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"49f91d9e3347a64441c00dc0909d6a4f082c55822ae031670720d026b0b827b4"} Feb 02 22:38:25 crc kubenswrapper[4755]: I0202 22:38:25.076641 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"32a8bfee721124415aa9b796de381ebe8198e39c7dc3aee81dc9ef11e562da57"} Feb 02 22:38:25 crc kubenswrapper[4755]: I0202 22:38:25.914314 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 22:38:26 crc kubenswrapper[4755]: I0202 22:38:26.077228 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Feb 02 22:38:26 crc kubenswrapper[4755]: I0202 22:38:26.077297 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c9784f55adf1d0fcd8ca287a927a0312dbfe88b275ab604a8999299751f9fec2"} Feb 02 22:38:26 crc kubenswrapper[4755]: I0202 22:38:26.080462 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"d7cc810d777dbcf293b21f6ff1e3613b676164da28d7e6982b795d22edded13e"} Feb 02 22:38:26 crc kubenswrapper[4755]: I0202 22:38:26.080484 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"79a37822bcf1e5708cc157c7b2c5a917d41c29a8810961b7c0b6b7056880dea2"} Feb 02 22:38:26 crc kubenswrapper[4755]: I0202 22:38:26.080649 4755 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="eea10ee7-ddaf-4f3c-86b1-82410e04081a" Feb 02 22:38:26 crc kubenswrapper[4755]: I0202 
22:38:26.080667 4755 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="eea10ee7-ddaf-4f3c-86b1-82410e04081a" Feb 02 22:38:26 crc kubenswrapper[4755]: I0202 22:38:26.080818 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:38:27 crc kubenswrapper[4755]: I0202 22:38:27.084170 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 22:38:27 crc kubenswrapper[4755]: I0202 22:38:27.089097 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 22:38:28 crc kubenswrapper[4755]: I0202 22:38:28.090022 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:38:28 crc kubenswrapper[4755]: I0202 22:38:28.091170 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:38:28 crc kubenswrapper[4755]: I0202 22:38:28.093812 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 22:38:28 crc kubenswrapper[4755]: I0202 22:38:28.100185 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:38:31 crc kubenswrapper[4755]: I0202 22:38:31.088010 4755 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:38:32 crc kubenswrapper[4755]: I0202 22:38:32.118118 4755 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="eea10ee7-ddaf-4f3c-86b1-82410e04081a" Feb 02 22:38:32 crc kubenswrapper[4755]: I0202 22:38:32.118163 4755 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="eea10ee7-ddaf-4f3c-86b1-82410e04081a" Feb 02 22:38:32 crc kubenswrapper[4755]: I0202 22:38:32.125592 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:38:32 crc kubenswrapper[4755]: I0202 22:38:32.129905 4755 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="cf765601-1708-437b-9ec5-e98938eee5ce" Feb 02 22:38:33 crc kubenswrapper[4755]: I0202 22:38:33.126461 4755 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="eea10ee7-ddaf-4f3c-86b1-82410e04081a" Feb 02 22:38:33 crc kubenswrapper[4755]: I0202 22:38:33.126510 4755 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="eea10ee7-ddaf-4f3c-86b1-82410e04081a" Feb 02 22:38:33 crc kubenswrapper[4755]: I0202 22:38:33.130484 4755 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="cf765601-1708-437b-9ec5-e98938eee5ce" Feb 02 22:38:38 crc kubenswrapper[4755]: I0202 22:38:38.751462 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 22:38:41 crc kubenswrapper[4755]: I0202 
22:38:41.201170 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Feb 02 22:38:41 crc kubenswrapper[4755]: I0202 22:38:41.928573 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 02 22:38:42 crc kubenswrapper[4755]: I0202 22:38:42.521406 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Feb 02 22:38:42 crc kubenswrapper[4755]: I0202 22:38:42.748832 4755 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Feb 02 22:38:43 crc kubenswrapper[4755]: I0202 22:38:43.056598 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 22:38:43 crc kubenswrapper[4755]: I0202 22:38:43.391485 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Feb 02 22:38:43 crc kubenswrapper[4755]: I0202 22:38:43.508991 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Feb 02 22:38:44 crc kubenswrapper[4755]: I0202 22:38:44.159948 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Feb 02 22:38:44 crc kubenswrapper[4755]: I0202 22:38:44.168087 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Feb 02 22:38:44 crc kubenswrapper[4755]: I0202 22:38:44.331742 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Feb 02 22:38:44 crc kubenswrapper[4755]: I0202 22:38:44.409706 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Feb 02 22:38:44 crc kubenswrapper[4755]: I0202 22:38:44.707053 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Feb 02 22:38:44 crc kubenswrapper[4755]: I0202 22:38:44.744902 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Feb 02 22:38:44 crc kubenswrapper[4755]: I0202 22:38:44.868820 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Feb 02 22:38:44 crc kubenswrapper[4755]: I0202 22:38:44.956888 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Feb 02 22:38:44 crc kubenswrapper[4755]: I0202 22:38:44.974852 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Feb 02 22:38:45 crc kubenswrapper[4755]: I0202 22:38:45.049976 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Feb 02 22:38:45 crc kubenswrapper[4755]: I0202 22:38:45.129449 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Feb 02 22:38:45 crc kubenswrapper[4755]: I0202 22:38:45.366139 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Feb 02 22:38:45 crc kubenswrapper[4755]: I0202 22:38:45.473099 4755 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Feb 02 22:38:45 crc kubenswrapper[4755]: I0202 22:38:45.488587 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Feb 02 22:38:45 crc kubenswrapper[4755]: I0202 22:38:45.543783 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Feb 02 22:38:45 crc kubenswrapper[4755]: I0202 22:38:45.552414 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Feb 02 22:38:45 crc kubenswrapper[4755]: I0202 22:38:45.568272 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Feb 02 22:38:45 crc kubenswrapper[4755]: I0202 22:38:45.734718 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Feb 02 22:38:45 crc kubenswrapper[4755]: I0202 22:38:45.792128 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Feb 02 22:38:45 crc kubenswrapper[4755]: I0202 22:38:45.867564 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Feb 02 22:38:46 crc kubenswrapper[4755]: I0202 22:38:46.079608 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Feb 02 22:38:46 crc kubenswrapper[4755]: I0202 22:38:46.102668 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Feb 02 22:38:46 crc kubenswrapper[4755]: I0202 22:38:46.221501 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Feb 02 22:38:46 crc kubenswrapper[4755]: I0202 22:38:46.333834 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Feb 02 22:38:46 crc kubenswrapper[4755]: I0202 22:38:46.380773 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Feb 02 22:38:46 crc kubenswrapper[4755]: I0202 22:38:46.396007 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Feb 02 22:38:46 crc kubenswrapper[4755]: I0202 22:38:46.478563 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Feb 02 22:38:46 crc kubenswrapper[4755]: I0202 22:38:46.499017 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Feb 02 22:38:46 crc kubenswrapper[4755]: I0202 22:38:46.543978 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Feb 02 22:38:46 crc kubenswrapper[4755]: I0202 22:38:46.579813 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Feb 02 22:38:46 crc kubenswrapper[4755]: I0202 22:38:46.614662 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Feb 02 22:38:46 crc kubenswrapper[4755]: I0202 22:38:46.628460 4755 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Feb 02 22:38:46 crc kubenswrapper[4755]: I0202 22:38:46.649287 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Feb 02 22:38:46 crc kubenswrapper[4755]: I0202 22:38:46.769193 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Feb 02 22:38:46 crc kubenswrapper[4755]: I0202 22:38:46.800638 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Feb 02 22:38:46 crc kubenswrapper[4755]: I0202 22:38:46.913928 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Feb 02 22:38:46 crc kubenswrapper[4755]: I0202 22:38:46.938308 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Feb 02 22:38:46 crc kubenswrapper[4755]: I0202 22:38:46.939568 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Feb 02 22:38:47 crc kubenswrapper[4755]: I0202 22:38:47.080845 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Feb 02 22:38:47 crc kubenswrapper[4755]: I0202 22:38:47.117615 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Feb 02 22:38:47 crc kubenswrapper[4755]: I0202 22:38:47.265106 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Feb 02 22:38:47 crc kubenswrapper[4755]: I0202 22:38:47.431360 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Feb 02 22:38:47 crc kubenswrapper[4755]: I0202 22:38:47.446202 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Feb 02 22:38:47 crc kubenswrapper[4755]: I0202 22:38:47.455704 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Feb 02 22:38:47 crc kubenswrapper[4755]: I0202 22:38:47.492693 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Feb 02 22:38:47 crc kubenswrapper[4755]: I0202 22:38:47.716777 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Feb 02 22:38:47 crc kubenswrapper[4755]: I0202 22:38:47.725668 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Feb 02 22:38:47 crc kubenswrapper[4755]: I0202 22:38:47.788333 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Feb 02 22:38:47 crc kubenswrapper[4755]: I0202 22:38:47.878808 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Feb 02 22:38:47 crc kubenswrapper[4755]: I0202 22:38:47.939824 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Feb 02 22:38:47 crc 
kubenswrapper[4755]: I0202 22:38:47.943690 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Feb 02 22:38:48 crc kubenswrapper[4755]: I0202 22:38:48.083459 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Feb 02 22:38:48 crc kubenswrapper[4755]: I0202 22:38:48.145855 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Feb 02 22:38:48 crc kubenswrapper[4755]: I0202 22:38:48.276547 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Feb 02 22:38:48 crc kubenswrapper[4755]: I0202 22:38:48.286345 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Feb 02 22:38:48 crc kubenswrapper[4755]: I0202 22:38:48.313041 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Feb 02 22:38:48 crc kubenswrapper[4755]: I0202 22:38:48.343594 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Feb 02 22:38:48 crc kubenswrapper[4755]: I0202 22:38:48.349418 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Feb 02 22:38:48 crc kubenswrapper[4755]: I0202 22:38:48.357273 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Feb 02 22:38:48 crc kubenswrapper[4755]: I0202 22:38:48.477332 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Feb 02 22:38:48 crc kubenswrapper[4755]: I0202 22:38:48.589897 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Feb 02 22:38:48 crc kubenswrapper[4755]: I0202 22:38:48.603407 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Feb 02 22:38:48 crc kubenswrapper[4755]: I0202 22:38:48.732614 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 22:38:48 crc kubenswrapper[4755]: I0202 22:38:48.754101 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Feb 02 22:38:48 crc kubenswrapper[4755]: I0202 22:38:48.792715 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Feb 02 22:38:48 crc kubenswrapper[4755]: I0202 22:38:48.806539 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Feb 02 22:38:48 crc kubenswrapper[4755]: I0202 22:38:48.892863 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Feb 02 22:38:48 crc kubenswrapper[4755]: I0202 22:38:48.918563 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.095124 4755 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.112165 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.180164 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.188519 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.296719 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.305444 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.316296 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.456221 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.483538 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.518968 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.520528 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.609968 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.643136 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.670422 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.693494 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.826584 4755 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.829139 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=38.82911078 podStartE2EDuration="38.82911078s" podCreationTimestamp="2026-02-02 22:38:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:38:31.10567563 +0000 UTC m=+266.796895976" watchObservedRunningTime="2026-02-02 22:38:49.82911078 +0000 UTC m=+285.520331116" Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.832054 4755 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-558db77b4-5nv2v"] Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.832113 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.838343 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.839627 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.899455 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.900869 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=18.900850837 podStartE2EDuration="18.900850837s" podCreationTimestamp="2026-02-02 22:38:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:38:49.872365195 +0000 UTC m=+285.563585521" watchObservedRunningTime="2026-02-02 22:38:49.900850837 +0000 UTC m=+285.592071163" Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.924474 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Feb 02 22:38:49 crc kubenswrapper[4755]: I0202 22:38:49.992169 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Feb 02 22:38:50 crc kubenswrapper[4755]: I0202 22:38:50.006544 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Feb 02 22:38:50 crc kubenswrapper[4755]: I0202 22:38:50.028440 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Feb 02 22:38:50 crc kubenswrapper[4755]: I0202 22:38:50.035209 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Feb 02 22:38:50 crc kubenswrapper[4755]: I0202 22:38:50.188254 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Feb 02 22:38:50 crc kubenswrapper[4755]: I0202 22:38:50.353627 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Feb 02 22:38:50 crc kubenswrapper[4755]: I0202 22:38:50.401955 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Feb 02 22:38:50 crc kubenswrapper[4755]: I0202 22:38:50.407071 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 02 22:38:50 crc kubenswrapper[4755]: I0202 22:38:50.416599 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Feb 02 22:38:50 crc kubenswrapper[4755]: I0202 22:38:50.420771 4755 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-dns-operator"/"metrics-tls" Feb 02 22:38:50 crc kubenswrapper[4755]: I0202 22:38:50.538012 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Feb 02 22:38:50 crc kubenswrapper[4755]: I0202 22:38:50.610762 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Feb 02 22:38:50 crc kubenswrapper[4755]: I0202 22:38:50.618423 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Feb 02 22:38:50 crc kubenswrapper[4755]: I0202 22:38:50.673626 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Feb 02 22:38:50 crc kubenswrapper[4755]: I0202 22:38:50.904446 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Feb 02 22:38:50 crc kubenswrapper[4755]: I0202 22:38:50.955661 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.003834 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.011310 4755 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.014875 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.045071 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.069569 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.089136 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" path="/var/lib/kubelet/pods/4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc/volumes" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.094265 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.139832 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.172417 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.187147 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.187168 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.245898 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.262244 4755 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.301055 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.332299 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.332366 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.361226 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.419062 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.500819 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.545666 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.694999 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.710167 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.718179 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.778374 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.882060 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.885446 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.990127 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 02 22:38:51 crc kubenswrapper[4755]: I0202 22:38:51.992048 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Feb 02 22:38:52 crc kubenswrapper[4755]: I0202 22:38:52.037248 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Feb 02 22:38:52 crc kubenswrapper[4755]: I0202 22:38:52.040136 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Feb 02 22:38:52 crc kubenswrapper[4755]: I0202 22:38:52.128862 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Feb 02 22:38:52 crc kubenswrapper[4755]: I0202 
22:38:52.128878 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Feb 02 22:38:52 crc kubenswrapper[4755]: I0202 22:38:52.214384 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 02 22:38:52 crc kubenswrapper[4755]: I0202 22:38:52.246761 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Feb 02 22:38:52 crc kubenswrapper[4755]: I0202 22:38:52.347652 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Feb 02 22:38:52 crc kubenswrapper[4755]: I0202 22:38:52.513970 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Feb 02 22:38:52 crc kubenswrapper[4755]: I0202 22:38:52.521834 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Feb 02 22:38:52 crc kubenswrapper[4755]: I0202 22:38:52.641123 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Feb 02 22:38:52 crc kubenswrapper[4755]: I0202 22:38:52.682613 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Feb 02 22:38:52 crc kubenswrapper[4755]: I0202 22:38:52.732560 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Feb 02 22:38:52 crc kubenswrapper[4755]: I0202 22:38:52.822487 4755 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Feb 02 22:38:52 crc kubenswrapper[4755]: I0202 22:38:52.832557 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Feb 02 22:38:52 crc kubenswrapper[4755]: I0202 22:38:52.901171 4755 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Feb 02 22:38:52 crc kubenswrapper[4755]: I0202 22:38:52.966430 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Feb 02 22:38:52 crc kubenswrapper[4755]: I0202 22:38:52.979538 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.090954 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.122231 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.128204 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr"] Feb 02 22:38:53 crc kubenswrapper[4755]: E0202 22:38:53.128496 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="371c3c30-c3ba-47a1-bc16-9b0a621be7b3" containerName="installer" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.128526 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="371c3c30-c3ba-47a1-bc16-9b0a621be7b3" containerName="installer" Feb 02 22:38:53 crc 
kubenswrapper[4755]: E0202 22:38:53.128552 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" containerName="oauth-openshift" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.128565 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" containerName="oauth-openshift" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.128773 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="4790ec6f-3dcb-4367-ba5b-6a7cd2a8a9bc" containerName="oauth-openshift" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.128794 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="371c3c30-c3ba-47a1-bc16-9b0a621be7b3" containerName="installer" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.129371 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.134128 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.134340 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.134423 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.134476 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.134548 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.134555 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.134557 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.135520 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.135524 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.135636 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.136603 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.136768 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.143265 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.148248 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr"] Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.149632 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.156161 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.194264 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.197476 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.221435 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.236352 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-system-service-ca\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.236649 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-system-session\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.236935 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a9154157-3c49-44f9-98a5-54d722db80cc-audit-dir\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.237189 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.237437 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-user-template-login\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.237694 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-user-template-error\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.238075 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a9154157-3c49-44f9-98a5-54d722db80cc-audit-policies\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.238352 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.238550 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.238931 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-system-router-certs\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.239211 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klv2f\" (UniqueName: \"kubernetes.io/projected/a9154157-3c49-44f9-98a5-54d722db80cc-kube-api-access-klv2f\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.239556 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.239899 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.240213 4755 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.279855 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.341664 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.341799 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-user-template-login\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.341828 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-user-template-error\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.341854 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a9154157-3c49-44f9-98a5-54d722db80cc-audit-policies\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.341879 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.341903 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.341930 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-system-router-certs\") pod 
\"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.341957 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klv2f\" (UniqueName: \"kubernetes.io/projected/a9154157-3c49-44f9-98a5-54d722db80cc-kube-api-access-klv2f\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.341983 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.342006 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.342025 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.342042 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-system-service-ca\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.342070 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-system-session\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.342088 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a9154157-3c49-44f9-98a5-54d722db80cc-audit-dir\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.342144 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a9154157-3c49-44f9-98a5-54d722db80cc-audit-dir\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " 
pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.343434 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a9154157-3c49-44f9-98a5-54d722db80cc-audit-policies\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.343500 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.344513 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-system-service-ca\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.347591 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.347607 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.347821 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-system-session\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.349218 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.349260 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-user-template-login\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: 
I0202 22:38:53.349365 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.349372 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-user-template-error\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.350403 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-system-router-certs\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.354585 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a9154157-3c49-44f9-98a5-54d722db80cc-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.368162 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klv2f\" (UniqueName: \"kubernetes.io/projected/a9154157-3c49-44f9-98a5-54d722db80cc-kube-api-access-klv2f\") pod \"oauth-openshift-6746f8c8c7-jjrmr\" (UID: \"a9154157-3c49-44f9-98a5-54d722db80cc\") " pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.377157 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.394075 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.396915 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.396929 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.427421 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.440787 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.496938 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.594799 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.600703 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.663938 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.690418 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.706121 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.707431 4755 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.707638 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://6d58dc87d118bf34b8b9de7cfb19c3ffaa0f2b6b0f45ec36c8dc0ad9487cf9be" gracePeriod=5 Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.940576 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Feb 02 22:38:53 crc kubenswrapper[4755]: I0202 22:38:53.944997 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr"] Feb 02 22:38:54 crc kubenswrapper[4755]: I0202 22:38:54.026602 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Feb 02 22:38:54 crc kubenswrapper[4755]: I0202 22:38:54.108436 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Feb 02 22:38:54 crc kubenswrapper[4755]: I0202 22:38:54.132845 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Feb 02 22:38:54 crc kubenswrapper[4755]: I0202 22:38:54.209259 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Feb 02 22:38:54 crc kubenswrapper[4755]: I0202 22:38:54.253628 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Feb 02 22:38:54 crc kubenswrapper[4755]: I0202 22:38:54.274007 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" event={"ID":"a9154157-3c49-44f9-98a5-54d722db80cc","Type":"ContainerStarted","Data":"d672bf22849e4276cf057156813d9558c36d00dd34ddc6fe7a32beb0ab581f08"} Feb 02 22:38:54 crc kubenswrapper[4755]: I0202 22:38:54.402681 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Feb 02 22:38:54 crc kubenswrapper[4755]: I0202 22:38:54.423005 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Feb 02 22:38:54 crc 
kubenswrapper[4755]: I0202 22:38:54.462966 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Feb 02 22:38:54 crc kubenswrapper[4755]: I0202 22:38:54.494098 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Feb 02 22:38:54 crc kubenswrapper[4755]: I0202 22:38:54.535095 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Feb 02 22:38:54 crc kubenswrapper[4755]: I0202 22:38:54.536688 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Feb 02 22:38:54 crc kubenswrapper[4755]: I0202 22:38:54.664083 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Feb 02 22:38:54 crc kubenswrapper[4755]: I0202 22:38:54.687864 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Feb 02 22:38:54 crc kubenswrapper[4755]: I0202 22:38:54.723975 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Feb 02 22:38:54 crc kubenswrapper[4755]: I0202 22:38:54.792389 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Feb 02 22:38:54 crc kubenswrapper[4755]: I0202 22:38:54.818171 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Feb 02 22:38:54 crc kubenswrapper[4755]: I0202 22:38:54.889120 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Feb 02 22:38:54 crc kubenswrapper[4755]: I0202 22:38:54.947082 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Feb 02 22:38:55 crc kubenswrapper[4755]: I0202 22:38:55.015589 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Feb 02 22:38:55 crc kubenswrapper[4755]: I0202 22:38:55.147542 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Feb 02 22:38:55 crc kubenswrapper[4755]: I0202 22:38:55.281625 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" event={"ID":"a9154157-3c49-44f9-98a5-54d722db80cc","Type":"ContainerStarted","Data":"7716a0f64bf1a33a8e93b631f5f9865a4274119d9e09b8b6794540cdc2d01840"} Feb 02 22:38:55 crc kubenswrapper[4755]: I0202 22:38:55.282093 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:55 crc kubenswrapper[4755]: I0202 22:38:55.297424 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Feb 02 22:38:55 crc kubenswrapper[4755]: I0202 22:38:55.300450 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Feb 02 22:38:55 crc kubenswrapper[4755]: I0202 22:38:55.300802 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" Feb 02 22:38:55 crc kubenswrapper[4755]: I0202 22:38:55.307436 4755 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-6746f8c8c7-jjrmr" podStartSLOduration=60.307413236 podStartE2EDuration="1m0.307413236s" podCreationTimestamp="2026-02-02 22:37:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:38:55.303089275 +0000 UTC m=+290.994309631" watchObservedRunningTime="2026-02-02 22:38:55.307413236 +0000 UTC m=+290.998633592" Feb 02 22:38:55 crc kubenswrapper[4755]: I0202 22:38:55.372951 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Feb 02 22:38:55 crc kubenswrapper[4755]: I0202 22:38:55.381489 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Feb 02 22:38:55 crc kubenswrapper[4755]: I0202 22:38:55.386390 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Feb 02 22:38:55 crc kubenswrapper[4755]: I0202 22:38:55.397850 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Feb 02 22:38:55 crc kubenswrapper[4755]: I0202 22:38:55.424437 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Feb 02 22:38:55 crc kubenswrapper[4755]: I0202 22:38:55.688817 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Feb 02 22:38:55 crc kubenswrapper[4755]: I0202 22:38:55.688995 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Feb 02 22:38:55 crc kubenswrapper[4755]: I0202 22:38:55.706559 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Feb 02 22:38:55 crc kubenswrapper[4755]: I0202 22:38:55.746779 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Feb 02 22:38:55 crc kubenswrapper[4755]: I0202 22:38:55.780949 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Feb 02 22:38:55 crc kubenswrapper[4755]: I0202 22:38:55.782761 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Feb 02 22:38:55 crc kubenswrapper[4755]: I0202 22:38:55.796858 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Feb 02 22:38:55 crc kubenswrapper[4755]: I0202 22:38:55.863797 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Feb 02 22:38:55 crc kubenswrapper[4755]: I0202 22:38:55.873400 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Feb 02 22:38:55 crc kubenswrapper[4755]: I0202 22:38:55.992775 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Feb 02 22:38:56 crc kubenswrapper[4755]: I0202 22:38:56.036047 4755 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 22:38:56 crc kubenswrapper[4755]: I0202 22:38:56.222852 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Feb 02 22:38:56 crc kubenswrapper[4755]: I0202 22:38:56.281916 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 02 22:38:56 crc kubenswrapper[4755]: I0202 22:38:56.291823 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Feb 02 22:38:56 crc kubenswrapper[4755]: I0202 22:38:56.545446 4755 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Feb 02 22:38:56 crc kubenswrapper[4755]: I0202 22:38:56.665411 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Feb 02 22:38:57 crc kubenswrapper[4755]: I0202 22:38:57.165079 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Feb 02 22:38:57 crc kubenswrapper[4755]: I0202 22:38:57.231834 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Feb 02 22:38:57 crc kubenswrapper[4755]: I0202 22:38:57.320018 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Feb 02 22:38:57 crc kubenswrapper[4755]: I0202 22:38:57.400009 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Feb 02 22:38:57 crc kubenswrapper[4755]: I0202 22:38:57.509510 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Feb 02 22:38:57 crc kubenswrapper[4755]: I0202 22:38:57.517218 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Feb 02 22:38:57 crc kubenswrapper[4755]: I0202 22:38:57.618369 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Feb 02 22:38:57 crc kubenswrapper[4755]: I0202 22:38:57.654550 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 02 22:38:57 crc kubenswrapper[4755]: I0202 22:38:57.810835 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Feb 02 22:38:57 crc kubenswrapper[4755]: I0202 22:38:57.876558 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Feb 02 22:38:57 crc kubenswrapper[4755]: I0202 22:38:57.925787 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Feb 02 22:38:58 crc kubenswrapper[4755]: I0202 22:38:58.035131 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Feb 02 22:38:58 crc kubenswrapper[4755]: I0202 22:38:58.098550 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Feb 02 22:38:58 crc kubenswrapper[4755]: I0202 22:38:58.117984 4755 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-etcd-operator"/"etcd-operator-config" Feb 02 22:38:58 crc kubenswrapper[4755]: I0202 22:38:58.307572 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Feb 02 22:38:58 crc kubenswrapper[4755]: I0202 22:38:58.346385 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Feb 02 22:38:58 crc kubenswrapper[4755]: I0202 22:38:58.659660 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Feb 02 22:38:58 crc kubenswrapper[4755]: I0202 22:38:58.841950 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Feb 02 22:38:58 crc kubenswrapper[4755]: I0202 22:38:58.842026 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 22:38:58 crc kubenswrapper[4755]: I0202 22:38:58.923608 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 22:38:58 crc kubenswrapper[4755]: I0202 22:38:58.923802 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 22:38:58 crc kubenswrapper[4755]: I0202 22:38:58.923892 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:38:58 crc kubenswrapper[4755]: I0202 22:38:58.923925 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 22:38:58 crc kubenswrapper[4755]: I0202 22:38:58.923965 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:38:58 crc kubenswrapper[4755]: I0202 22:38:58.923979 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 22:38:58 crc kubenswrapper[4755]: I0202 22:38:58.924089 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 22:38:58 crc kubenswrapper[4755]: I0202 22:38:58.924100 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:38:58 crc kubenswrapper[4755]: I0202 22:38:58.924135 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:38:58 crc kubenswrapper[4755]: I0202 22:38:58.924541 4755 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Feb 02 22:38:58 crc kubenswrapper[4755]: I0202 22:38:58.924571 4755 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Feb 02 22:38:58 crc kubenswrapper[4755]: I0202 22:38:58.924597 4755 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 02 22:38:58 crc kubenswrapper[4755]: I0202 22:38:58.924621 4755 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Feb 02 22:38:58 crc kubenswrapper[4755]: I0202 22:38:58.936035 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:38:59 crc kubenswrapper[4755]: I0202 22:38:59.025617 4755 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 02 22:38:59 crc kubenswrapper[4755]: I0202 22:38:59.062640 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Feb 02 22:38:59 crc kubenswrapper[4755]: I0202 22:38:59.080712 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Feb 02 22:38:59 crc kubenswrapper[4755]: I0202 22:38:59.081878 4755 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Feb 02 22:38:59 crc kubenswrapper[4755]: I0202 22:38:59.099817 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 02 22:38:59 crc kubenswrapper[4755]: I0202 22:38:59.099883 4755 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="1bf44f8d-eb09-4bfa-ad80-0cf66b878e93" Feb 02 22:38:59 crc kubenswrapper[4755]: I0202 22:38:59.108066 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 02 22:38:59 crc kubenswrapper[4755]: I0202 22:38:59.108124 4755 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="1bf44f8d-eb09-4bfa-ad80-0cf66b878e93" Feb 02 22:38:59 crc kubenswrapper[4755]: I0202 22:38:59.171987 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Feb 02 22:38:59 crc kubenswrapper[4755]: I0202 22:38:59.250231 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Feb 02 22:38:59 crc kubenswrapper[4755]: I0202 22:38:59.321424 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Feb 02 22:38:59 crc kubenswrapper[4755]: I0202 22:38:59.321516 4755 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="6d58dc87d118bf34b8b9de7cfb19c3ffaa0f2b6b0f45ec36c8dc0ad9487cf9be" exitCode=137 Feb 02 22:38:59 crc kubenswrapper[4755]: I0202 22:38:59.321622 4755 scope.go:117] "RemoveContainer" containerID="6d58dc87d118bf34b8b9de7cfb19c3ffaa0f2b6b0f45ec36c8dc0ad9487cf9be" Feb 02 22:38:59 crc kubenswrapper[4755]: I0202 22:38:59.321961 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 22:38:59 crc kubenswrapper[4755]: I0202 22:38:59.350625 4755 scope.go:117] "RemoveContainer" containerID="6d58dc87d118bf34b8b9de7cfb19c3ffaa0f2b6b0f45ec36c8dc0ad9487cf9be" Feb 02 22:38:59 crc kubenswrapper[4755]: E0202 22:38:59.351382 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d58dc87d118bf34b8b9de7cfb19c3ffaa0f2b6b0f45ec36c8dc0ad9487cf9be\": container with ID starting with 6d58dc87d118bf34b8b9de7cfb19c3ffaa0f2b6b0f45ec36c8dc0ad9487cf9be not found: ID does not exist" containerID="6d58dc87d118bf34b8b9de7cfb19c3ffaa0f2b6b0f45ec36c8dc0ad9487cf9be" Feb 02 22:38:59 crc kubenswrapper[4755]: I0202 22:38:59.351472 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d58dc87d118bf34b8b9de7cfb19c3ffaa0f2b6b0f45ec36c8dc0ad9487cf9be"} err="failed to get container status \"6d58dc87d118bf34b8b9de7cfb19c3ffaa0f2b6b0f45ec36c8dc0ad9487cf9be\": rpc error: code = NotFound desc = could not find container \"6d58dc87d118bf34b8b9de7cfb19c3ffaa0f2b6b0f45ec36c8dc0ad9487cf9be\": container with ID starting with 6d58dc87d118bf34b8b9de7cfb19c3ffaa0f2b6b0f45ec36c8dc0ad9487cf9be not found: ID does not exist" Feb 02 22:38:59 crc kubenswrapper[4755]: I0202 22:38:59.511827 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Feb 02 22:38:59 crc kubenswrapper[4755]: I0202 22:38:59.581631 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.051764 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8"] Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.052121 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" podUID="f1962000-4669-40af-a431-51a11190c54c" containerName="controller-manager" containerID="cri-o://a1dd80289385c8cd3cd9bf0e0d36d0cdc0581492e4ad680f945115b56d2b6c8f" gracePeriod=30 Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.142241 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr"] Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.142489 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" podUID="4977977d-0e69-414d-a7b5-bd9d6586f3c9" containerName="route-controller-manager" containerID="cri-o://6ca5c6dd82c3b1bc6591f791aa75ae0b410cadbc4306687b008e22b1e3e11e23" gracePeriod=30 Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.330678 4755 generic.go:334] "Generic (PLEG): container finished" podID="4977977d-0e69-414d-a7b5-bd9d6586f3c9" containerID="6ca5c6dd82c3b1bc6591f791aa75ae0b410cadbc4306687b008e22b1e3e11e23" exitCode=0 Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.330785 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" event={"ID":"4977977d-0e69-414d-a7b5-bd9d6586f3c9","Type":"ContainerDied","Data":"6ca5c6dd82c3b1bc6591f791aa75ae0b410cadbc4306687b008e22b1e3e11e23"} Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.337337 4755 
generic.go:334] "Generic (PLEG): container finished" podID="f1962000-4669-40af-a431-51a11190c54c" containerID="a1dd80289385c8cd3cd9bf0e0d36d0cdc0581492e4ad680f945115b56d2b6c8f" exitCode=0 Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.337371 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" event={"ID":"f1962000-4669-40af-a431-51a11190c54c","Type":"ContainerDied","Data":"a1dd80289385c8cd3cd9bf0e0d36d0cdc0581492e4ad680f945115b56d2b6c8f"} Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.516670 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.521371 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.548028 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f1962000-4669-40af-a431-51a11190c54c-serving-cert\") pod \"f1962000-4669-40af-a431-51a11190c54c\" (UID: \"f1962000-4669-40af-a431-51a11190c54c\") " Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.548205 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f1962000-4669-40af-a431-51a11190c54c-client-ca\") pod \"f1962000-4669-40af-a431-51a11190c54c\" (UID: \"f1962000-4669-40af-a431-51a11190c54c\") " Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.548250 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1962000-4669-40af-a431-51a11190c54c-config\") pod \"f1962000-4669-40af-a431-51a11190c54c\" (UID: \"f1962000-4669-40af-a431-51a11190c54c\") " Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.548294 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-scqgd\" (UniqueName: \"kubernetes.io/projected/4977977d-0e69-414d-a7b5-bd9d6586f3c9-kube-api-access-scqgd\") pod \"4977977d-0e69-414d-a7b5-bd9d6586f3c9\" (UID: \"4977977d-0e69-414d-a7b5-bd9d6586f3c9\") " Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.548357 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f1962000-4669-40af-a431-51a11190c54c-proxy-ca-bundles\") pod \"f1962000-4669-40af-a431-51a11190c54c\" (UID: \"f1962000-4669-40af-a431-51a11190c54c\") " Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.548428 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4977977d-0e69-414d-a7b5-bd9d6586f3c9-config\") pod \"4977977d-0e69-414d-a7b5-bd9d6586f3c9\" (UID: \"4977977d-0e69-414d-a7b5-bd9d6586f3c9\") " Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.548472 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzd58\" (UniqueName: \"kubernetes.io/projected/f1962000-4669-40af-a431-51a11190c54c-kube-api-access-wzd58\") pod \"f1962000-4669-40af-a431-51a11190c54c\" (UID: \"f1962000-4669-40af-a431-51a11190c54c\") " Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.548531 4755 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4977977d-0e69-414d-a7b5-bd9d6586f3c9-client-ca\") pod \"4977977d-0e69-414d-a7b5-bd9d6586f3c9\" (UID: \"4977977d-0e69-414d-a7b5-bd9d6586f3c9\") " Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.548595 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4977977d-0e69-414d-a7b5-bd9d6586f3c9-serving-cert\") pod \"4977977d-0e69-414d-a7b5-bd9d6586f3c9\" (UID: \"4977977d-0e69-414d-a7b5-bd9d6586f3c9\") " Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.549542 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1962000-4669-40af-a431-51a11190c54c-client-ca" (OuterVolumeSpecName: "client-ca") pod "f1962000-4669-40af-a431-51a11190c54c" (UID: "f1962000-4669-40af-a431-51a11190c54c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.549833 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1962000-4669-40af-a431-51a11190c54c-config" (OuterVolumeSpecName: "config") pod "f1962000-4669-40af-a431-51a11190c54c" (UID: "f1962000-4669-40af-a431-51a11190c54c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.550139 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4977977d-0e69-414d-a7b5-bd9d6586f3c9-client-ca" (OuterVolumeSpecName: "client-ca") pod "4977977d-0e69-414d-a7b5-bd9d6586f3c9" (UID: "4977977d-0e69-414d-a7b5-bd9d6586f3c9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.550215 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1962000-4669-40af-a431-51a11190c54c-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "f1962000-4669-40af-a431-51a11190c54c" (UID: "f1962000-4669-40af-a431-51a11190c54c"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.550550 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4977977d-0e69-414d-a7b5-bd9d6586f3c9-config" (OuterVolumeSpecName: "config") pod "4977977d-0e69-414d-a7b5-bd9d6586f3c9" (UID: "4977977d-0e69-414d-a7b5-bd9d6586f3c9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.553586 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1962000-4669-40af-a431-51a11190c54c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f1962000-4669-40af-a431-51a11190c54c" (UID: "f1962000-4669-40af-a431-51a11190c54c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.554613 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4977977d-0e69-414d-a7b5-bd9d6586f3c9-kube-api-access-scqgd" (OuterVolumeSpecName: "kube-api-access-scqgd") pod "4977977d-0e69-414d-a7b5-bd9d6586f3c9" (UID: "4977977d-0e69-414d-a7b5-bd9d6586f3c9"). 
InnerVolumeSpecName "kube-api-access-scqgd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.556289 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4977977d-0e69-414d-a7b5-bd9d6586f3c9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "4977977d-0e69-414d-a7b5-bd9d6586f3c9" (UID: "4977977d-0e69-414d-a7b5-bd9d6586f3c9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.557046 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1962000-4669-40af-a431-51a11190c54c-kube-api-access-wzd58" (OuterVolumeSpecName: "kube-api-access-wzd58") pod "f1962000-4669-40af-a431-51a11190c54c" (UID: "f1962000-4669-40af-a431-51a11190c54c"). InnerVolumeSpecName "kube-api-access-wzd58". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.650338 4755 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f1962000-4669-40af-a431-51a11190c54c-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.650391 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1962000-4669-40af-a431-51a11190c54c-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.650411 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-scqgd\" (UniqueName: \"kubernetes.io/projected/4977977d-0e69-414d-a7b5-bd9d6586f3c9-kube-api-access-scqgd\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.650433 4755 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f1962000-4669-40af-a431-51a11190c54c-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.650450 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4977977d-0e69-414d-a7b5-bd9d6586f3c9-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.650469 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzd58\" (UniqueName: \"kubernetes.io/projected/f1962000-4669-40af-a431-51a11190c54c-kube-api-access-wzd58\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.650486 4755 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4977977d-0e69-414d-a7b5-bd9d6586f3c9-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.650502 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4977977d-0e69-414d-a7b5-bd9d6586f3c9-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:00 crc kubenswrapper[4755]: I0202 22:39:00.650521 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f1962000-4669-40af-a431-51a11190c54c-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.346510 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" event={"ID":"f1962000-4669-40af-a431-51a11190c54c","Type":"ContainerDied","Data":"ab9c54860ae10da7bdec93de9c399516e154f1357e243872ae3b865eb72b561d"} Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.346612 4755 scope.go:117] "RemoveContainer" containerID="a1dd80289385c8cd3cd9bf0e0d36d0cdc0581492e4ad680f945115b56d2b6c8f" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.346540 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.352073 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" event={"ID":"4977977d-0e69-414d-a7b5-bd9d6586f3c9","Type":"ContainerDied","Data":"a12cffaffdeb7751e4b26ac76f022babeea8dde88a8ac81ce67de34f0f755965"} Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.352206 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.376171 4755 scope.go:117] "RemoveContainer" containerID="6ca5c6dd82c3b1bc6591f791aa75ae0b410cadbc4306687b008e22b1e3e11e23" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.377490 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8"] Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.390139 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7d4946cb7c-l5jc8"] Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.396784 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr"] Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.402822 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cd8f4584-7mqmr"] Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.742916 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-544d98fbb8-bz2xr"] Feb 02 22:39:01 crc kubenswrapper[4755]: E0202 22:39:01.743418 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1962000-4669-40af-a431-51a11190c54c" containerName="controller-manager" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.743458 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1962000-4669-40af-a431-51a11190c54c" containerName="controller-manager" Feb 02 22:39:01 crc kubenswrapper[4755]: E0202 22:39:01.743497 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4977977d-0e69-414d-a7b5-bd9d6586f3c9" containerName="route-controller-manager" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.743515 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="4977977d-0e69-414d-a7b5-bd9d6586f3c9" containerName="route-controller-manager" Feb 02 22:39:01 crc kubenswrapper[4755]: E0202 22:39:01.743549 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.743567 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Feb 02 
22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.743829 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1962000-4669-40af-a431-51a11190c54c" containerName="controller-manager" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.743872 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="4977977d-0e69-414d-a7b5-bd9d6586f3c9" containerName="route-controller-manager" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.743893 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.744683 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.750405 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.750464 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.752059 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.752118 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.752142 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.752933 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.753258 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst"] Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.754469 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.759874 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.760181 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.760633 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.760649 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.760879 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.764111 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.767845 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a24292eb-0bdf-4d61-b38f-06510d88d743-config\") pod \"controller-manager-544d98fbb8-bz2xr\" (UID: \"a24292eb-0bdf-4d61-b38f-06510d88d743\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.768333 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-544d98fbb8-bz2xr"] Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.769128 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a24292eb-0bdf-4d61-b38f-06510d88d743-serving-cert\") pod \"controller-manager-544d98fbb8-bz2xr\" (UID: \"a24292eb-0bdf-4d61-b38f-06510d88d743\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.769339 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a24292eb-0bdf-4d61-b38f-06510d88d743-client-ca\") pod \"controller-manager-544d98fbb8-bz2xr\" (UID: \"a24292eb-0bdf-4d61-b38f-06510d88d743\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.769534 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a24292eb-0bdf-4d61-b38f-06510d88d743-proxy-ca-bundles\") pod \"controller-manager-544d98fbb8-bz2xr\" (UID: \"a24292eb-0bdf-4d61-b38f-06510d88d743\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.769546 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.769759 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pmpn\" 
(UniqueName: \"kubernetes.io/projected/a24292eb-0bdf-4d61-b38f-06510d88d743-kube-api-access-6pmpn\") pod \"controller-manager-544d98fbb8-bz2xr\" (UID: \"a24292eb-0bdf-4d61-b38f-06510d88d743\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.770924 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst"] Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.871767 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bb3270d1-512a-47da-bb29-6a76a3746b40-client-ca\") pod \"route-controller-manager-cd9c86f64-p8wst\" (UID: \"bb3270d1-512a-47da-bb29-6a76a3746b40\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.871829 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a24292eb-0bdf-4d61-b38f-06510d88d743-proxy-ca-bundles\") pod \"controller-manager-544d98fbb8-bz2xr\" (UID: \"a24292eb-0bdf-4d61-b38f-06510d88d743\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.871889 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb3270d1-512a-47da-bb29-6a76a3746b40-serving-cert\") pod \"route-controller-manager-cd9c86f64-p8wst\" (UID: \"bb3270d1-512a-47da-bb29-6a76a3746b40\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.871934 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pmpn\" (UniqueName: \"kubernetes.io/projected/a24292eb-0bdf-4d61-b38f-06510d88d743-kube-api-access-6pmpn\") pod \"controller-manager-544d98fbb8-bz2xr\" (UID: \"a24292eb-0bdf-4d61-b38f-06510d88d743\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.871975 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bt58\" (UniqueName: \"kubernetes.io/projected/bb3270d1-512a-47da-bb29-6a76a3746b40-kube-api-access-4bt58\") pod \"route-controller-manager-cd9c86f64-p8wst\" (UID: \"bb3270d1-512a-47da-bb29-6a76a3746b40\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.872002 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a24292eb-0bdf-4d61-b38f-06510d88d743-config\") pod \"controller-manager-544d98fbb8-bz2xr\" (UID: \"a24292eb-0bdf-4d61-b38f-06510d88d743\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.872047 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb3270d1-512a-47da-bb29-6a76a3746b40-config\") pod \"route-controller-manager-cd9c86f64-p8wst\" (UID: \"bb3270d1-512a-47da-bb29-6a76a3746b40\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst" Feb 02 
22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.872084 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a24292eb-0bdf-4d61-b38f-06510d88d743-serving-cert\") pod \"controller-manager-544d98fbb8-bz2xr\" (UID: \"a24292eb-0bdf-4d61-b38f-06510d88d743\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.872115 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a24292eb-0bdf-4d61-b38f-06510d88d743-client-ca\") pod \"controller-manager-544d98fbb8-bz2xr\" (UID: \"a24292eb-0bdf-4d61-b38f-06510d88d743\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.873138 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a24292eb-0bdf-4d61-b38f-06510d88d743-proxy-ca-bundles\") pod \"controller-manager-544d98fbb8-bz2xr\" (UID: \"a24292eb-0bdf-4d61-b38f-06510d88d743\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.873158 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a24292eb-0bdf-4d61-b38f-06510d88d743-client-ca\") pod \"controller-manager-544d98fbb8-bz2xr\" (UID: \"a24292eb-0bdf-4d61-b38f-06510d88d743\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.873833 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a24292eb-0bdf-4d61-b38f-06510d88d743-config\") pod \"controller-manager-544d98fbb8-bz2xr\" (UID: \"a24292eb-0bdf-4d61-b38f-06510d88d743\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.876142 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a24292eb-0bdf-4d61-b38f-06510d88d743-serving-cert\") pod \"controller-manager-544d98fbb8-bz2xr\" (UID: \"a24292eb-0bdf-4d61-b38f-06510d88d743\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.898320 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pmpn\" (UniqueName: \"kubernetes.io/projected/a24292eb-0bdf-4d61-b38f-06510d88d743-kube-api-access-6pmpn\") pod \"controller-manager-544d98fbb8-bz2xr\" (UID: \"a24292eb-0bdf-4d61-b38f-06510d88d743\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.973021 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bb3270d1-512a-47da-bb29-6a76a3746b40-client-ca\") pod \"route-controller-manager-cd9c86f64-p8wst\" (UID: \"bb3270d1-512a-47da-bb29-6a76a3746b40\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.973080 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb3270d1-512a-47da-bb29-6a76a3746b40-serving-cert\") 
pod \"route-controller-manager-cd9c86f64-p8wst\" (UID: \"bb3270d1-512a-47da-bb29-6a76a3746b40\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.973112 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bt58\" (UniqueName: \"kubernetes.io/projected/bb3270d1-512a-47da-bb29-6a76a3746b40-kube-api-access-4bt58\") pod \"route-controller-manager-cd9c86f64-p8wst\" (UID: \"bb3270d1-512a-47da-bb29-6a76a3746b40\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.973146 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb3270d1-512a-47da-bb29-6a76a3746b40-config\") pod \"route-controller-manager-cd9c86f64-p8wst\" (UID: \"bb3270d1-512a-47da-bb29-6a76a3746b40\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.974158 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb3270d1-512a-47da-bb29-6a76a3746b40-config\") pod \"route-controller-manager-cd9c86f64-p8wst\" (UID: \"bb3270d1-512a-47da-bb29-6a76a3746b40\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.974435 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bb3270d1-512a-47da-bb29-6a76a3746b40-client-ca\") pod \"route-controller-manager-cd9c86f64-p8wst\" (UID: \"bb3270d1-512a-47da-bb29-6a76a3746b40\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.980705 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb3270d1-512a-47da-bb29-6a76a3746b40-serving-cert\") pod \"route-controller-manager-cd9c86f64-p8wst\" (UID: \"bb3270d1-512a-47da-bb29-6a76a3746b40\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst" Feb 02 22:39:01 crc kubenswrapper[4755]: I0202 22:39:01.999666 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bt58\" (UniqueName: \"kubernetes.io/projected/bb3270d1-512a-47da-bb29-6a76a3746b40-kube-api-access-4bt58\") pod \"route-controller-manager-cd9c86f64-p8wst\" (UID: \"bb3270d1-512a-47da-bb29-6a76a3746b40\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst" Feb 02 22:39:02 crc kubenswrapper[4755]: I0202 22:39:02.097844 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr" Feb 02 22:39:02 crc kubenswrapper[4755]: I0202 22:39:02.110108 4755 util.go:30] "No sandbox for pod can be found. 
Feb 02 22:39:02 crc kubenswrapper[4755]: I0202 22:39:02.110108 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst"
Feb 02 22:39:02 crc kubenswrapper[4755]: I0202 22:39:02.361704 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-544d98fbb8-bz2xr"]
Feb 02 22:39:02 crc kubenswrapper[4755]: I0202 22:39:02.417124 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst"]
Feb 02 22:39:02 crc kubenswrapper[4755]: W0202 22:39:02.424861 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbb3270d1_512a_47da_bb29_6a76a3746b40.slice/crio-48cdca94d777ca390ed662aa852431f86d5b28fd2800a60c46d5bfb5943f3572 WatchSource:0}: Error finding container 48cdca94d777ca390ed662aa852431f86d5b28fd2800a60c46d5bfb5943f3572: Status 404 returned error can't find the container with id 48cdca94d777ca390ed662aa852431f86d5b28fd2800a60c46d5bfb5943f3572
Feb 02 22:39:03 crc kubenswrapper[4755]: I0202 22:39:03.081119 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4977977d-0e69-414d-a7b5-bd9d6586f3c9" path="/var/lib/kubelet/pods/4977977d-0e69-414d-a7b5-bd9d6586f3c9/volumes"
Feb 02 22:39:03 crc kubenswrapper[4755]: I0202 22:39:03.082617 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1962000-4669-40af-a431-51a11190c54c" path="/var/lib/kubelet/pods/f1962000-4669-40af-a431-51a11190c54c/volumes"
Feb 02 22:39:03 crc kubenswrapper[4755]: I0202 22:39:03.372894 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr" event={"ID":"a24292eb-0bdf-4d61-b38f-06510d88d743","Type":"ContainerStarted","Data":"f80040a8a786c7d19163742dfd416a435a2bd5a4938a8c54c6022ae824be7895"}
Feb 02 22:39:03 crc kubenswrapper[4755]: I0202 22:39:03.372957 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr" event={"ID":"a24292eb-0bdf-4d61-b38f-06510d88d743","Type":"ContainerStarted","Data":"096ba93fed471d1dd404b1581c10caffa9d96ef4981f3867227c987668384b6a"}
Feb 02 22:39:03 crc kubenswrapper[4755]: I0202 22:39:03.373257 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr"
Feb 02 22:39:03 crc kubenswrapper[4755]: I0202 22:39:03.378997 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst" event={"ID":"bb3270d1-512a-47da-bb29-6a76a3746b40","Type":"ContainerStarted","Data":"e9c5d33ba674353caa4c2a169e35cdbdb636c2306863ac85e3bc6db66f42886c"}
Feb 02 22:39:03 crc kubenswrapper[4755]: I0202 22:39:03.379058 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst" event={"ID":"bb3270d1-512a-47da-bb29-6a76a3746b40","Type":"ContainerStarted","Data":"48cdca94d777ca390ed662aa852431f86d5b28fd2800a60c46d5bfb5943f3572"}
Feb 02 22:39:03 crc kubenswrapper[4755]: I0202 22:39:03.379254 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst"
Feb 02 22:39:03 crc kubenswrapper[4755]: I0202 22:39:03.381611 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr"
Feb 02 22:39:03 crc kubenswrapper[4755]: I0202 22:39:03.387230 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst"
Feb 02 22:39:03 crc kubenswrapper[4755]: I0202 22:39:03.403119 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr" podStartSLOduration=3.40309617 podStartE2EDuration="3.40309617s" podCreationTimestamp="2026-02-02 22:39:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:39:03.401159316 +0000 UTC m=+299.092379672" watchObservedRunningTime="2026-02-02 22:39:03.40309617 +0000 UTC m=+299.094316526"
Feb 02 22:39:03 crc kubenswrapper[4755]: I0202 22:39:03.440669 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst" podStartSLOduration=3.440651498 podStartE2EDuration="3.440651498s" podCreationTimestamp="2026-02-02 22:39:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:39:03.440375559 +0000 UTC m=+299.131595895" watchObservedRunningTime="2026-02-02 22:39:03.440651498 +0000 UTC m=+299.131871824"
Feb 02 22:39:04 crc kubenswrapper[4755]: I0202 22:39:04.822200 4755 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials
Feb 02 22:39:13 crc kubenswrapper[4755]: I0202 22:39:13.454612 4755 generic.go:334] "Generic (PLEG): container finished" podID="467ef27d-8f51-4317-80ee-9071d7024f86" containerID="3a7e23b649f364d0d3ee753c085c1a297d22374e6a1c7c434e47a67c95325cd8" exitCode=0
Feb 02 22:39:13 crc kubenswrapper[4755]: I0202 22:39:13.454712 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f" event={"ID":"467ef27d-8f51-4317-80ee-9071d7024f86","Type":"ContainerDied","Data":"3a7e23b649f364d0d3ee753c085c1a297d22374e6a1c7c434e47a67c95325cd8"}
Feb 02 22:39:13 crc kubenswrapper[4755]: I0202 22:39:13.455974 4755 scope.go:117] "RemoveContainer" containerID="3a7e23b649f364d0d3ee753c085c1a297d22374e6a1c7c434e47a67c95325cd8"
Feb 02 22:39:14 crc kubenswrapper[4755]: I0202 22:39:14.462473 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f" event={"ID":"467ef27d-8f51-4317-80ee-9071d7024f86","Type":"ContainerStarted","Data":"80ea541105c28da2ae11e17fb34aaf04bb86c51a802d6621ddf4954c92bdfaae"}
Feb 02 22:39:14 crc kubenswrapper[4755]: I0202 22:39:14.463183 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f"
Feb 02 22:39:14 crc kubenswrapper[4755]: I0202 22:39:14.464899 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f"
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.012545 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-544d98fbb8-bz2xr"]
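
Note: the pod_startup_latency_tracker entries above carry wall-clock timestamps plus a Go monotonic-clock suffix ("m=+299.09…"). A sketch of recovering the startup latency from the timestamp fields; the layout string and suffix trimming are assumptions, and the result (~3.4s) only roughly matches podStartSLOduration, since the tracker uses its own clock readings.

// Sketch: parse kubelet timestamp strings and compute startup latency.
package main

import (
	"fmt"
	"strings"
	"time"
)

const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func parseKubeTime(s string) (time.Time, error) {
	if i := strings.Index(s, " m="); i >= 0 { // drop the monotonic-clock suffix
		s = s[:i]
	}
	return time.Parse(layout, s)
}

func main() {
	created, _ := parseKubeTime("2026-02-02 22:39:00 +0000 UTC")
	running, _ := parseKubeTime("2026-02-02 22:39:03.401159316 +0000 UTC m=+299.092379672")
	fmt.Println("startup latency:", running.Sub(created)) // ~3.4s, in line with podStartSLOduration
}
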
containerName="controller-manager" containerID="cri-o://f80040a8a786c7d19163742dfd416a435a2bd5a4938a8c54c6022ae824be7895" gracePeriod=30 Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.031274 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst"] Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.031599 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst" podUID="bb3270d1-512a-47da-bb29-6a76a3746b40" containerName="route-controller-manager" containerID="cri-o://e9c5d33ba674353caa4c2a169e35cdbdb636c2306863ac85e3bc6db66f42886c" gracePeriod=30 Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.498052 4755 generic.go:334] "Generic (PLEG): container finished" podID="a24292eb-0bdf-4d61-b38f-06510d88d743" containerID="f80040a8a786c7d19163742dfd416a435a2bd5a4938a8c54c6022ae824be7895" exitCode=0 Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.498122 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr" event={"ID":"a24292eb-0bdf-4d61-b38f-06510d88d743","Type":"ContainerDied","Data":"f80040a8a786c7d19163742dfd416a435a2bd5a4938a8c54c6022ae824be7895"} Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.499522 4755 generic.go:334] "Generic (PLEG): container finished" podID="bb3270d1-512a-47da-bb29-6a76a3746b40" containerID="e9c5d33ba674353caa4c2a169e35cdbdb636c2306863ac85e3bc6db66f42886c" exitCode=0 Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.499551 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst" event={"ID":"bb3270d1-512a-47da-bb29-6a76a3746b40","Type":"ContainerDied","Data":"e9c5d33ba674353caa4c2a169e35cdbdb636c2306863ac85e3bc6db66f42886c"} Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.592930 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst" Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.598465 4755 util.go:48] "No ready sandbox for pod can be found. 
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.598465 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr"
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.737895 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb3270d1-512a-47da-bb29-6a76a3746b40-serving-cert\") pod \"bb3270d1-512a-47da-bb29-6a76a3746b40\" (UID: \"bb3270d1-512a-47da-bb29-6a76a3746b40\") "
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.738186 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a24292eb-0bdf-4d61-b38f-06510d88d743-serving-cert\") pod \"a24292eb-0bdf-4d61-b38f-06510d88d743\" (UID: \"a24292eb-0bdf-4d61-b38f-06510d88d743\") "
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.738215 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb3270d1-512a-47da-bb29-6a76a3746b40-config\") pod \"bb3270d1-512a-47da-bb29-6a76a3746b40\" (UID: \"bb3270d1-512a-47da-bb29-6a76a3746b40\") "
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.738251 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4bt58\" (UniqueName: \"kubernetes.io/projected/bb3270d1-512a-47da-bb29-6a76a3746b40-kube-api-access-4bt58\") pod \"bb3270d1-512a-47da-bb29-6a76a3746b40\" (UID: \"bb3270d1-512a-47da-bb29-6a76a3746b40\") "
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.738285 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a24292eb-0bdf-4d61-b38f-06510d88d743-config\") pod \"a24292eb-0bdf-4d61-b38f-06510d88d743\" (UID: \"a24292eb-0bdf-4d61-b38f-06510d88d743\") "
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.738313 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a24292eb-0bdf-4d61-b38f-06510d88d743-client-ca\") pod \"a24292eb-0bdf-4d61-b38f-06510d88d743\" (UID: \"a24292eb-0bdf-4d61-b38f-06510d88d743\") "
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.738333 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bb3270d1-512a-47da-bb29-6a76a3746b40-client-ca\") pod \"bb3270d1-512a-47da-bb29-6a76a3746b40\" (UID: \"bb3270d1-512a-47da-bb29-6a76a3746b40\") "
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.738365 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6pmpn\" (UniqueName: \"kubernetes.io/projected/a24292eb-0bdf-4d61-b38f-06510d88d743-kube-api-access-6pmpn\") pod \"a24292eb-0bdf-4d61-b38f-06510d88d743\" (UID: \"a24292eb-0bdf-4d61-b38f-06510d88d743\") "
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.738406 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a24292eb-0bdf-4d61-b38f-06510d88d743-proxy-ca-bundles\") pod \"a24292eb-0bdf-4d61-b38f-06510d88d743\" (UID: \"a24292eb-0bdf-4d61-b38f-06510d88d743\") "
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.738830 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bb3270d1-512a-47da-bb29-6a76a3746b40-client-ca" (OuterVolumeSpecName: "client-ca") pod "bb3270d1-512a-47da-bb29-6a76a3746b40" (UID: "bb3270d1-512a-47da-bb29-6a76a3746b40"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.738972 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bb3270d1-512a-47da-bb29-6a76a3746b40-config" (OuterVolumeSpecName: "config") pod "bb3270d1-512a-47da-bb29-6a76a3746b40" (UID: "bb3270d1-512a-47da-bb29-6a76a3746b40"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.739152 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a24292eb-0bdf-4d61-b38f-06510d88d743-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "a24292eb-0bdf-4d61-b38f-06510d88d743" (UID: "a24292eb-0bdf-4d61-b38f-06510d88d743"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.739169 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a24292eb-0bdf-4d61-b38f-06510d88d743-config" (OuterVolumeSpecName: "config") pod "a24292eb-0bdf-4d61-b38f-06510d88d743" (UID: "a24292eb-0bdf-4d61-b38f-06510d88d743"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.739294 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a24292eb-0bdf-4d61-b38f-06510d88d743-client-ca" (OuterVolumeSpecName: "client-ca") pod "a24292eb-0bdf-4d61-b38f-06510d88d743" (UID: "a24292eb-0bdf-4d61-b38f-06510d88d743"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.743625 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a24292eb-0bdf-4d61-b38f-06510d88d743-kube-api-access-6pmpn" (OuterVolumeSpecName: "kube-api-access-6pmpn") pod "a24292eb-0bdf-4d61-b38f-06510d88d743" (UID: "a24292eb-0bdf-4d61-b38f-06510d88d743"). InnerVolumeSpecName "kube-api-access-6pmpn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.743698 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a24292eb-0bdf-4d61-b38f-06510d88d743-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "a24292eb-0bdf-4d61-b38f-06510d88d743" (UID: "a24292eb-0bdf-4d61-b38f-06510d88d743"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.743841 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb3270d1-512a-47da-bb29-6a76a3746b40-kube-api-access-4bt58" (OuterVolumeSpecName: "kube-api-access-4bt58") pod "bb3270d1-512a-47da-bb29-6a76a3746b40" (UID: "bb3270d1-512a-47da-bb29-6a76a3746b40"). InnerVolumeSpecName "kube-api-access-4bt58". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.743940 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb3270d1-512a-47da-bb29-6a76a3746b40-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bb3270d1-512a-47da-bb29-6a76a3746b40" (UID: "bb3270d1-512a-47da-bb29-6a76a3746b40"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.839532 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a24292eb-0bdf-4d61-b38f-06510d88d743-config\") on node \"crc\" DevicePath \"\""
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.839589 4755 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a24292eb-0bdf-4d61-b38f-06510d88d743-client-ca\") on node \"crc\" DevicePath \"\""
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.839607 4755 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bb3270d1-512a-47da-bb29-6a76a3746b40-client-ca\") on node \"crc\" DevicePath \"\""
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.839628 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6pmpn\" (UniqueName: \"kubernetes.io/projected/a24292eb-0bdf-4d61-b38f-06510d88d743-kube-api-access-6pmpn\") on node \"crc\" DevicePath \"\""
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.839650 4755 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a24292eb-0bdf-4d61-b38f-06510d88d743-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.839668 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb3270d1-512a-47da-bb29-6a76a3746b40-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.839685 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a24292eb-0bdf-4d61-b38f-06510d88d743-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.839704 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb3270d1-512a-47da-bb29-6a76a3746b40-config\") on node \"crc\" DevicePath \"\""
Feb 02 22:39:20 crc kubenswrapper[4755]: I0202 22:39:20.839722 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4bt58\" (UniqueName: \"kubernetes.io/projected/bb3270d1-512a-47da-bb29-6a76a3746b40-kube-api-access-4bt58\") on node \"crc\" DevicePath \"\""
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.509693 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst" event={"ID":"bb3270d1-512a-47da-bb29-6a76a3746b40","Type":"ContainerDied","Data":"48cdca94d777ca390ed662aa852431f86d5b28fd2800a60c46d5bfb5943f3572"}
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.510455 4755 scope.go:117] "RemoveContainer" containerID="e9c5d33ba674353caa4c2a169e35cdbdb636c2306863ac85e3bc6db66f42886c"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.509776 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.512598 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr" event={"ID":"a24292eb-0bdf-4d61-b38f-06510d88d743","Type":"ContainerDied","Data":"096ba93fed471d1dd404b1581c10caffa9d96ef4981f3867227c987668384b6a"}
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.512677 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-544d98fbb8-bz2xr"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.543235 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst"]
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.548174 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cd9c86f64-p8wst"]
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.551097 4755 scope.go:117] "RemoveContainer" containerID="f80040a8a786c7d19163742dfd416a435a2bd5a4938a8c54c6022ae824be7895"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.561476 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-544d98fbb8-bz2xr"]
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.568585 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-544d98fbb8-bz2xr"]
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.761681 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-cc8d77586-vsqln"]
Feb 02 22:39:21 crc kubenswrapper[4755]: E0202 22:39:21.762024 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a24292eb-0bdf-4d61-b38f-06510d88d743" containerName="controller-manager"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.762047 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a24292eb-0bdf-4d61-b38f-06510d88d743" containerName="controller-manager"
Feb 02 22:39:21 crc kubenswrapper[4755]: E0202 22:39:21.762079 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb3270d1-512a-47da-bb29-6a76a3746b40" containerName="route-controller-manager"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.762092 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb3270d1-512a-47da-bb29-6a76a3746b40" containerName="route-controller-manager"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.763066 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb3270d1-512a-47da-bb29-6a76a3746b40" containerName="route-controller-manager"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.763119 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a24292eb-0bdf-4d61-b38f-06510d88d743" containerName="controller-manager"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.763775 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.766536 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.767116 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.768576 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.768810 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.768984 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.769119 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.783197 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.790911 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm"]
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.791760 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.796299 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.804038 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.804329 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.804529 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.804658 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.804799 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.806530 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm"]
Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.811198 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-cc8d77586-vsqln"]
\"kubernetes.io/configmap/63a95339-526e-44ef-8b36-ed273250fdff-proxy-ca-bundles\") pod \"controller-manager-cc8d77586-vsqln\" (UID: \"63a95339-526e-44ef-8b36-ed273250fdff\") " pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.955321 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a544da5-0eee-4277-9637-232bd8ffa9cb-serving-cert\") pod \"route-controller-manager-79cb97567b-5nswm\" (UID: \"3a544da5-0eee-4277-9637-232bd8ffa9cb\") " pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm" Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.955357 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a544da5-0eee-4277-9637-232bd8ffa9cb-config\") pod \"route-controller-manager-79cb97567b-5nswm\" (UID: \"3a544da5-0eee-4277-9637-232bd8ffa9cb\") " pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm" Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.955386 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/63a95339-526e-44ef-8b36-ed273250fdff-client-ca\") pod \"controller-manager-cc8d77586-vsqln\" (UID: \"63a95339-526e-44ef-8b36-ed273250fdff\") " pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.955419 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3a544da5-0eee-4277-9637-232bd8ffa9cb-client-ca\") pod \"route-controller-manager-79cb97567b-5nswm\" (UID: \"3a544da5-0eee-4277-9637-232bd8ffa9cb\") " pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm" Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.955444 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63a95339-526e-44ef-8b36-ed273250fdff-config\") pod \"controller-manager-cc8d77586-vsqln\" (UID: \"63a95339-526e-44ef-8b36-ed273250fdff\") " pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.955464 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5xll\" (UniqueName: \"kubernetes.io/projected/63a95339-526e-44ef-8b36-ed273250fdff-kube-api-access-l5xll\") pod \"controller-manager-cc8d77586-vsqln\" (UID: \"63a95339-526e-44ef-8b36-ed273250fdff\") " pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.955498 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/63a95339-526e-44ef-8b36-ed273250fdff-serving-cert\") pod \"controller-manager-cc8d77586-vsqln\" (UID: \"63a95339-526e-44ef-8b36-ed273250fdff\") " pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" Feb 02 22:39:21 crc kubenswrapper[4755]: I0202 22:39:21.955522 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpcsf\" (UniqueName: 
\"kubernetes.io/projected/3a544da5-0eee-4277-9637-232bd8ffa9cb-kube-api-access-gpcsf\") pod \"route-controller-manager-79cb97567b-5nswm\" (UID: \"3a544da5-0eee-4277-9637-232bd8ffa9cb\") " pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm" Feb 02 22:39:22 crc kubenswrapper[4755]: I0202 22:39:22.056534 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/63a95339-526e-44ef-8b36-ed273250fdff-proxy-ca-bundles\") pod \"controller-manager-cc8d77586-vsqln\" (UID: \"63a95339-526e-44ef-8b36-ed273250fdff\") " pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" Feb 02 22:39:22 crc kubenswrapper[4755]: I0202 22:39:22.056578 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a544da5-0eee-4277-9637-232bd8ffa9cb-serving-cert\") pod \"route-controller-manager-79cb97567b-5nswm\" (UID: \"3a544da5-0eee-4277-9637-232bd8ffa9cb\") " pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm" Feb 02 22:39:22 crc kubenswrapper[4755]: I0202 22:39:22.056601 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a544da5-0eee-4277-9637-232bd8ffa9cb-config\") pod \"route-controller-manager-79cb97567b-5nswm\" (UID: \"3a544da5-0eee-4277-9637-232bd8ffa9cb\") " pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm" Feb 02 22:39:22 crc kubenswrapper[4755]: I0202 22:39:22.056625 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/63a95339-526e-44ef-8b36-ed273250fdff-client-ca\") pod \"controller-manager-cc8d77586-vsqln\" (UID: \"63a95339-526e-44ef-8b36-ed273250fdff\") " pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" Feb 02 22:39:22 crc kubenswrapper[4755]: I0202 22:39:22.056649 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3a544da5-0eee-4277-9637-232bd8ffa9cb-client-ca\") pod \"route-controller-manager-79cb97567b-5nswm\" (UID: \"3a544da5-0eee-4277-9637-232bd8ffa9cb\") " pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm" Feb 02 22:39:22 crc kubenswrapper[4755]: I0202 22:39:22.056666 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63a95339-526e-44ef-8b36-ed273250fdff-config\") pod \"controller-manager-cc8d77586-vsqln\" (UID: \"63a95339-526e-44ef-8b36-ed273250fdff\") " pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" Feb 02 22:39:22 crc kubenswrapper[4755]: I0202 22:39:22.056684 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5xll\" (UniqueName: \"kubernetes.io/projected/63a95339-526e-44ef-8b36-ed273250fdff-kube-api-access-l5xll\") pod \"controller-manager-cc8d77586-vsqln\" (UID: \"63a95339-526e-44ef-8b36-ed273250fdff\") " pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" Feb 02 22:39:22 crc kubenswrapper[4755]: I0202 22:39:22.056709 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/63a95339-526e-44ef-8b36-ed273250fdff-serving-cert\") pod \"controller-manager-cc8d77586-vsqln\" (UID: 
\"63a95339-526e-44ef-8b36-ed273250fdff\") " pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" Feb 02 22:39:22 crc kubenswrapper[4755]: I0202 22:39:22.056742 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpcsf\" (UniqueName: \"kubernetes.io/projected/3a544da5-0eee-4277-9637-232bd8ffa9cb-kube-api-access-gpcsf\") pod \"route-controller-manager-79cb97567b-5nswm\" (UID: \"3a544da5-0eee-4277-9637-232bd8ffa9cb\") " pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm" Feb 02 22:39:22 crc kubenswrapper[4755]: I0202 22:39:22.058276 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a544da5-0eee-4277-9637-232bd8ffa9cb-config\") pod \"route-controller-manager-79cb97567b-5nswm\" (UID: \"3a544da5-0eee-4277-9637-232bd8ffa9cb\") " pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm" Feb 02 22:39:22 crc kubenswrapper[4755]: I0202 22:39:22.058418 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/63a95339-526e-44ef-8b36-ed273250fdff-client-ca\") pod \"controller-manager-cc8d77586-vsqln\" (UID: \"63a95339-526e-44ef-8b36-ed273250fdff\") " pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" Feb 02 22:39:22 crc kubenswrapper[4755]: I0202 22:39:22.058426 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/63a95339-526e-44ef-8b36-ed273250fdff-proxy-ca-bundles\") pod \"controller-manager-cc8d77586-vsqln\" (UID: \"63a95339-526e-44ef-8b36-ed273250fdff\") " pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" Feb 02 22:39:22 crc kubenswrapper[4755]: I0202 22:39:22.058593 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63a95339-526e-44ef-8b36-ed273250fdff-config\") pod \"controller-manager-cc8d77586-vsqln\" (UID: \"63a95339-526e-44ef-8b36-ed273250fdff\") " pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" Feb 02 22:39:22 crc kubenswrapper[4755]: I0202 22:39:22.058847 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3a544da5-0eee-4277-9637-232bd8ffa9cb-client-ca\") pod \"route-controller-manager-79cb97567b-5nswm\" (UID: \"3a544da5-0eee-4277-9637-232bd8ffa9cb\") " pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm" Feb 02 22:39:22 crc kubenswrapper[4755]: I0202 22:39:22.062431 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/63a95339-526e-44ef-8b36-ed273250fdff-serving-cert\") pod \"controller-manager-cc8d77586-vsqln\" (UID: \"63a95339-526e-44ef-8b36-ed273250fdff\") " pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" Feb 02 22:39:22 crc kubenswrapper[4755]: I0202 22:39:22.063768 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a544da5-0eee-4277-9637-232bd8ffa9cb-serving-cert\") pod \"route-controller-manager-79cb97567b-5nswm\" (UID: \"3a544da5-0eee-4277-9637-232bd8ffa9cb\") " pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm" Feb 02 22:39:22 crc kubenswrapper[4755]: I0202 22:39:22.072539 4755 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-gpcsf\" (UniqueName: \"kubernetes.io/projected/3a544da5-0eee-4277-9637-232bd8ffa9cb-kube-api-access-gpcsf\") pod \"route-controller-manager-79cb97567b-5nswm\" (UID: \"3a544da5-0eee-4277-9637-232bd8ffa9cb\") " pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm" Feb 02 22:39:22 crc kubenswrapper[4755]: I0202 22:39:22.078568 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5xll\" (UniqueName: \"kubernetes.io/projected/63a95339-526e-44ef-8b36-ed273250fdff-kube-api-access-l5xll\") pod \"controller-manager-cc8d77586-vsqln\" (UID: \"63a95339-526e-44ef-8b36-ed273250fdff\") " pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" Feb 02 22:39:22 crc kubenswrapper[4755]: I0202 22:39:22.096446 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" Feb 02 22:39:22 crc kubenswrapper[4755]: I0202 22:39:22.116758 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm" Feb 02 22:39:22 crc kubenswrapper[4755]: I0202 22:39:22.542610 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-cc8d77586-vsqln"] Feb 02 22:39:22 crc kubenswrapper[4755]: W0202 22:39:22.551035 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod63a95339_526e_44ef_8b36_ed273250fdff.slice/crio-183a77ef85e489078ef1846e5f41c07c5efadba3f83610d320229f445e2f8056 WatchSource:0}: Error finding container 183a77ef85e489078ef1846e5f41c07c5efadba3f83610d320229f445e2f8056: Status 404 returned error can't find the container with id 183a77ef85e489078ef1846e5f41c07c5efadba3f83610d320229f445e2f8056 Feb 02 22:39:22 crc kubenswrapper[4755]: I0202 22:39:22.606262 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm"] Feb 02 22:39:22 crc kubenswrapper[4755]: W0202 22:39:22.615372 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3a544da5_0eee_4277_9637_232bd8ffa9cb.slice/crio-4d9bde432602eac94ec87f2ee43447d1dcebb6590b51930e3f0a41465ebea82b WatchSource:0}: Error finding container 4d9bde432602eac94ec87f2ee43447d1dcebb6590b51930e3f0a41465ebea82b: Status 404 returned error can't find the container with id 4d9bde432602eac94ec87f2ee43447d1dcebb6590b51930e3f0a41465ebea82b Feb 02 22:39:23 crc kubenswrapper[4755]: I0202 22:39:23.075304 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a24292eb-0bdf-4d61-b38f-06510d88d743" path="/var/lib/kubelet/pods/a24292eb-0bdf-4d61-b38f-06510d88d743/volumes" Feb 02 22:39:23 crc kubenswrapper[4755]: I0202 22:39:23.076347 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb3270d1-512a-47da-bb29-6a76a3746b40" path="/var/lib/kubelet/pods/bb3270d1-512a-47da-bb29-6a76a3746b40/volumes" Feb 02 22:39:23 crc kubenswrapper[4755]: I0202 22:39:23.529929 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm" event={"ID":"3a544da5-0eee-4277-9637-232bd8ffa9cb","Type":"ContainerStarted","Data":"ce31c674f686ef143bc7f9daf3a69e2098b613b90096e0fb863cbe306502dcd3"} Feb 02 22:39:23 crc 
kubenswrapper[4755]: I0202 22:39:23.529979 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm" event={"ID":"3a544da5-0eee-4277-9637-232bd8ffa9cb","Type":"ContainerStarted","Data":"4d9bde432602eac94ec87f2ee43447d1dcebb6590b51930e3f0a41465ebea82b"} Feb 02 22:39:23 crc kubenswrapper[4755]: I0202 22:39:23.530000 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm" Feb 02 22:39:23 crc kubenswrapper[4755]: I0202 22:39:23.531124 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" event={"ID":"63a95339-526e-44ef-8b36-ed273250fdff","Type":"ContainerStarted","Data":"6ec3142e7d7f96b1eac97856580859ffdfdec03e2e4b14689d0d2f2b2dd6a891"} Feb 02 22:39:23 crc kubenswrapper[4755]: I0202 22:39:23.531178 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" event={"ID":"63a95339-526e-44ef-8b36-ed273250fdff","Type":"ContainerStarted","Data":"183a77ef85e489078ef1846e5f41c07c5efadba3f83610d320229f445e2f8056"} Feb 02 22:39:23 crc kubenswrapper[4755]: I0202 22:39:23.531687 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" Feb 02 22:39:23 crc kubenswrapper[4755]: I0202 22:39:23.536017 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm" Feb 02 22:39:23 crc kubenswrapper[4755]: I0202 22:39:23.538662 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" Feb 02 22:39:23 crc kubenswrapper[4755]: I0202 22:39:23.551685 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm" podStartSLOduration=3.551667208 podStartE2EDuration="3.551667208s" podCreationTimestamp="2026-02-02 22:39:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:39:23.550538501 +0000 UTC m=+319.241758837" watchObservedRunningTime="2026-02-02 22:39:23.551667208 +0000 UTC m=+319.242887534" Feb 02 22:39:23 crc kubenswrapper[4755]: I0202 22:39:23.611313 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" podStartSLOduration=3.611288948 podStartE2EDuration="3.611288948s" podCreationTimestamp="2026-02-02 22:39:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:39:23.606821792 +0000 UTC m=+319.298042148" watchObservedRunningTime="2026-02-02 22:39:23.611288948 +0000 UTC m=+319.302509314" Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.426789 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-7clsl"] Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.428146 4755 util.go:30] "No sandbox for pod can be found. 
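
Note: the "Failed to process watch event … Status 404" warnings above name systemd cgroup paths of the form kubepods-burstable-pod<uid-with-underscores>.slice/crio-<container-id>; the cgroup watch fired before the runtime had registered the container, and the same container IDs show up as ContainerStarted moments later. A sketch of recovering the pod UID and container ID from such a path; the parsing is an assumption fitted to the paths in this log.

// Sketch: decode pod UID and container ID from a kubepods cgroup path.
package main

import (
	"fmt"
	"regexp"
	"strings"
)

var slice = regexp.MustCompile(`pod([0-9a-f_]+)\.slice/(crio)-([0-9a-f]+)$`)

func main() {
	p := "/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod63a95339_526e_44ef_8b36_ed273250fdff.slice/crio-183a77ef85e489078ef1846e5f41c07c5efadba3f83610d320229f445e2f8056"
	if m := slice.FindStringSubmatch(p); m != nil {
		uid := strings.ReplaceAll(m[1], "_", "-") // slice names encode the UID with underscores
		fmt.Printf("podUID=%s runtime=%s containerID=%s\n", uid, m[2], m[3])
	}
}
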
Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.428146 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-7clsl"
Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.440708 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-7clsl"]
Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.608857 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e7072800-eb6f-4f54-a24f-58b6f877ffab-trusted-ca\") pod \"image-registry-66df7c8f76-7clsl\" (UID: \"e7072800-eb6f-4f54-a24f-58b6f877ffab\") " pod="openshift-image-registry/image-registry-66df7c8f76-7clsl"
Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.608912 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e7072800-eb6f-4f54-a24f-58b6f877ffab-ca-trust-extracted\") pod \"image-registry-66df7c8f76-7clsl\" (UID: \"e7072800-eb6f-4f54-a24f-58b6f877ffab\") " pod="openshift-image-registry/image-registry-66df7c8f76-7clsl"
Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.608935 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e7072800-eb6f-4f54-a24f-58b6f877ffab-installation-pull-secrets\") pod \"image-registry-66df7c8f76-7clsl\" (UID: \"e7072800-eb6f-4f54-a24f-58b6f877ffab\") " pod="openshift-image-registry/image-registry-66df7c8f76-7clsl"
Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.608958 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e7072800-eb6f-4f54-a24f-58b6f877ffab-registry-tls\") pod \"image-registry-66df7c8f76-7clsl\" (UID: \"e7072800-eb6f-4f54-a24f-58b6f877ffab\") " pod="openshift-image-registry/image-registry-66df7c8f76-7clsl"
Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.608994 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-7clsl\" (UID: \"e7072800-eb6f-4f54-a24f-58b6f877ffab\") " pod="openshift-image-registry/image-registry-66df7c8f76-7clsl"
Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.609022 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dn8w\" (UniqueName: \"kubernetes.io/projected/e7072800-eb6f-4f54-a24f-58b6f877ffab-kube-api-access-4dn8w\") pod \"image-registry-66df7c8f76-7clsl\" (UID: \"e7072800-eb6f-4f54-a24f-58b6f877ffab\") " pod="openshift-image-registry/image-registry-66df7c8f76-7clsl"
Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.609051 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e7072800-eb6f-4f54-a24f-58b6f877ffab-registry-certificates\") pod \"image-registry-66df7c8f76-7clsl\" (UID: \"e7072800-eb6f-4f54-a24f-58b6f877ffab\") " pod="openshift-image-registry/image-registry-66df7c8f76-7clsl"
Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.609076 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e7072800-eb6f-4f54-a24f-58b6f877ffab-bound-sa-token\") pod \"image-registry-66df7c8f76-7clsl\" (UID: \"e7072800-eb6f-4f54-a24f-58b6f877ffab\") " pod="openshift-image-registry/image-registry-66df7c8f76-7clsl"
Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.649062 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-7clsl\" (UID: \"e7072800-eb6f-4f54-a24f-58b6f877ffab\") " pod="openshift-image-registry/image-registry-66df7c8f76-7clsl"
Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.711993 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e7072800-eb6f-4f54-a24f-58b6f877ffab-ca-trust-extracted\") pod \"image-registry-66df7c8f76-7clsl\" (UID: \"e7072800-eb6f-4f54-a24f-58b6f877ffab\") " pod="openshift-image-registry/image-registry-66df7c8f76-7clsl"
Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.712181 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e7072800-eb6f-4f54-a24f-58b6f877ffab-installation-pull-secrets\") pod \"image-registry-66df7c8f76-7clsl\" (UID: \"e7072800-eb6f-4f54-a24f-58b6f877ffab\") " pod="openshift-image-registry/image-registry-66df7c8f76-7clsl"
Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.712255 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e7072800-eb6f-4f54-a24f-58b6f877ffab-registry-tls\") pod \"image-registry-66df7c8f76-7clsl\" (UID: \"e7072800-eb6f-4f54-a24f-58b6f877ffab\") " pod="openshift-image-registry/image-registry-66df7c8f76-7clsl"
Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.712418 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dn8w\" (UniqueName: \"kubernetes.io/projected/e7072800-eb6f-4f54-a24f-58b6f877ffab-kube-api-access-4dn8w\") pod \"image-registry-66df7c8f76-7clsl\" (UID: \"e7072800-eb6f-4f54-a24f-58b6f877ffab\") " pod="openshift-image-registry/image-registry-66df7c8f76-7clsl"
Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.712691 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e7072800-eb6f-4f54-a24f-58b6f877ffab-registry-certificates\") pod \"image-registry-66df7c8f76-7clsl\" (UID: \"e7072800-eb6f-4f54-a24f-58b6f877ffab\") " pod="openshift-image-registry/image-registry-66df7c8f76-7clsl"
Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.713490 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e7072800-eb6f-4f54-a24f-58b6f877ffab-ca-trust-extracted\") pod \"image-registry-66df7c8f76-7clsl\" (UID: \"e7072800-eb6f-4f54-a24f-58b6f877ffab\") " pod="openshift-image-registry/image-registry-66df7c8f76-7clsl"
pod="openshift-image-registry/image-registry-66df7c8f76-7clsl" Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.715133 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e7072800-eb6f-4f54-a24f-58b6f877ffab-bound-sa-token\") pod \"image-registry-66df7c8f76-7clsl\" (UID: \"e7072800-eb6f-4f54-a24f-58b6f877ffab\") " pod="openshift-image-registry/image-registry-66df7c8f76-7clsl" Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.715233 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e7072800-eb6f-4f54-a24f-58b6f877ffab-trusted-ca\") pod \"image-registry-66df7c8f76-7clsl\" (UID: \"e7072800-eb6f-4f54-a24f-58b6f877ffab\") " pod="openshift-image-registry/image-registry-66df7c8f76-7clsl" Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.718392 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e7072800-eb6f-4f54-a24f-58b6f877ffab-trusted-ca\") pod \"image-registry-66df7c8f76-7clsl\" (UID: \"e7072800-eb6f-4f54-a24f-58b6f877ffab\") " pod="openshift-image-registry/image-registry-66df7c8f76-7clsl" Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.718528 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e7072800-eb6f-4f54-a24f-58b6f877ffab-installation-pull-secrets\") pod \"image-registry-66df7c8f76-7clsl\" (UID: \"e7072800-eb6f-4f54-a24f-58b6f877ffab\") " pod="openshift-image-registry/image-registry-66df7c8f76-7clsl" Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.723099 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e7072800-eb6f-4f54-a24f-58b6f877ffab-registry-tls\") pod \"image-registry-66df7c8f76-7clsl\" (UID: \"e7072800-eb6f-4f54-a24f-58b6f877ffab\") " pod="openshift-image-registry/image-registry-66df7c8f76-7clsl" Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.736465 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dn8w\" (UniqueName: \"kubernetes.io/projected/e7072800-eb6f-4f54-a24f-58b6f877ffab-kube-api-access-4dn8w\") pod \"image-registry-66df7c8f76-7clsl\" (UID: \"e7072800-eb6f-4f54-a24f-58b6f877ffab\") " pod="openshift-image-registry/image-registry-66df7c8f76-7clsl" Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.738266 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e7072800-eb6f-4f54-a24f-58b6f877ffab-bound-sa-token\") pod \"image-registry-66df7c8f76-7clsl\" (UID: \"e7072800-eb6f-4f54-a24f-58b6f877ffab\") " pod="openshift-image-registry/image-registry-66df7c8f76-7clsl" Feb 02 22:39:36 crc kubenswrapper[4755]: I0202 22:39:36.749935 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-7clsl" Feb 02 22:39:37 crc kubenswrapper[4755]: I0202 22:39:37.202763 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-7clsl"] Feb 02 22:39:37 crc kubenswrapper[4755]: I0202 22:39:37.615498 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-7clsl" event={"ID":"e7072800-eb6f-4f54-a24f-58b6f877ffab","Type":"ContainerStarted","Data":"d20b14b9ed144ff2ba4926a5d902b926548145a83800fdae537a837fb9e1c049"} Feb 02 22:39:37 crc kubenswrapper[4755]: I0202 22:39:37.615842 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-7clsl" event={"ID":"e7072800-eb6f-4f54-a24f-58b6f877ffab","Type":"ContainerStarted","Data":"fbe6b2613301d216ac8b95566e9b23ec2a8b8797867dfb7a0cc4ffa98682a933"} Feb 02 22:39:37 crc kubenswrapper[4755]: I0202 22:39:37.615889 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-7clsl" Feb 02 22:39:37 crc kubenswrapper[4755]: I0202 22:39:37.652792 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-7clsl" podStartSLOduration=1.65276489 podStartE2EDuration="1.65276489s" podCreationTimestamp="2026-02-02 22:39:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:39:37.644344087 +0000 UTC m=+333.335564453" watchObservedRunningTime="2026-02-02 22:39:37.65276489 +0000 UTC m=+333.343985246" Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.031582 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-cc8d77586-vsqln"] Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.032629 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" podUID="63a95339-526e-44ef-8b36-ed273250fdff" containerName="controller-manager" containerID="cri-o://6ec3142e7d7f96b1eac97856580859ffdfdec03e2e4b14689d0d2f2b2dd6a891" gracePeriod=30 Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.064769 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm"] Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.065023 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm" podUID="3a544da5-0eee-4277-9637-232bd8ffa9cb" containerName="route-controller-manager" containerID="cri-o://ce31c674f686ef143bc7f9daf3a69e2098b613b90096e0fb863cbe306502dcd3" gracePeriod=30 Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.644607 4755 generic.go:334] "Generic (PLEG): container finished" podID="3a544da5-0eee-4277-9637-232bd8ffa9cb" containerID="ce31c674f686ef143bc7f9daf3a69e2098b613b90096e0fb863cbe306502dcd3" exitCode=0 Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.644686 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm" event={"ID":"3a544da5-0eee-4277-9637-232bd8ffa9cb","Type":"ContainerDied","Data":"ce31c674f686ef143bc7f9daf3a69e2098b613b90096e0fb863cbe306502dcd3"} Feb 02 22:39:40 crc kubenswrapper[4755]: 
I0202 22:39:40.645260 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm" event={"ID":"3a544da5-0eee-4277-9637-232bd8ffa9cb","Type":"ContainerDied","Data":"4d9bde432602eac94ec87f2ee43447d1dcebb6590b51930e3f0a41465ebea82b"} Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.645355 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4d9bde432602eac94ec87f2ee43447d1dcebb6590b51930e3f0a41465ebea82b" Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.646769 4755 generic.go:334] "Generic (PLEG): container finished" podID="63a95339-526e-44ef-8b36-ed273250fdff" containerID="6ec3142e7d7f96b1eac97856580859ffdfdec03e2e4b14689d0d2f2b2dd6a891" exitCode=0 Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.646875 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" event={"ID":"63a95339-526e-44ef-8b36-ed273250fdff","Type":"ContainerDied","Data":"6ec3142e7d7f96b1eac97856580859ffdfdec03e2e4b14689d0d2f2b2dd6a891"} Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.646987 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" event={"ID":"63a95339-526e-44ef-8b36-ed273250fdff","Type":"ContainerDied","Data":"183a77ef85e489078ef1846e5f41c07c5efadba3f83610d320229f445e2f8056"} Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.647075 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="183a77ef85e489078ef1846e5f41c07c5efadba3f83610d320229f445e2f8056" Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.658598 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm" Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.664315 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.780555 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a544da5-0eee-4277-9637-232bd8ffa9cb-serving-cert\") pod \"3a544da5-0eee-4277-9637-232bd8ffa9cb\" (UID: \"3a544da5-0eee-4277-9637-232bd8ffa9cb\") " Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.780633 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5xll\" (UniqueName: \"kubernetes.io/projected/63a95339-526e-44ef-8b36-ed273250fdff-kube-api-access-l5xll\") pod \"63a95339-526e-44ef-8b36-ed273250fdff\" (UID: \"63a95339-526e-44ef-8b36-ed273250fdff\") " Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.780667 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpcsf\" (UniqueName: \"kubernetes.io/projected/3a544da5-0eee-4277-9637-232bd8ffa9cb-kube-api-access-gpcsf\") pod \"3a544da5-0eee-4277-9637-232bd8ffa9cb\" (UID: \"3a544da5-0eee-4277-9637-232bd8ffa9cb\") " Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.780690 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63a95339-526e-44ef-8b36-ed273250fdff-config\") pod \"63a95339-526e-44ef-8b36-ed273250fdff\" (UID: \"63a95339-526e-44ef-8b36-ed273250fdff\") " Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.780712 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3a544da5-0eee-4277-9637-232bd8ffa9cb-client-ca\") pod \"3a544da5-0eee-4277-9637-232bd8ffa9cb\" (UID: \"3a544da5-0eee-4277-9637-232bd8ffa9cb\") " Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.780756 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/63a95339-526e-44ef-8b36-ed273250fdff-client-ca\") pod \"63a95339-526e-44ef-8b36-ed273250fdff\" (UID: \"63a95339-526e-44ef-8b36-ed273250fdff\") " Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.780772 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/63a95339-526e-44ef-8b36-ed273250fdff-proxy-ca-bundles\") pod \"63a95339-526e-44ef-8b36-ed273250fdff\" (UID: \"63a95339-526e-44ef-8b36-ed273250fdff\") " Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.780794 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a544da5-0eee-4277-9637-232bd8ffa9cb-config\") pod \"3a544da5-0eee-4277-9637-232bd8ffa9cb\" (UID: \"3a544da5-0eee-4277-9637-232bd8ffa9cb\") " Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.780824 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/63a95339-526e-44ef-8b36-ed273250fdff-serving-cert\") pod \"63a95339-526e-44ef-8b36-ed273250fdff\" (UID: \"63a95339-526e-44ef-8b36-ed273250fdff\") " Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.782156 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63a95339-526e-44ef-8b36-ed273250fdff-config" (OuterVolumeSpecName: "config") pod "63a95339-526e-44ef-8b36-ed273250fdff" (UID: 
"63a95339-526e-44ef-8b36-ed273250fdff"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.782288 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a544da5-0eee-4277-9637-232bd8ffa9cb-client-ca" (OuterVolumeSpecName: "client-ca") pod "3a544da5-0eee-4277-9637-232bd8ffa9cb" (UID: "3a544da5-0eee-4277-9637-232bd8ffa9cb"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.782654 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63a95339-526e-44ef-8b36-ed273250fdff-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "63a95339-526e-44ef-8b36-ed273250fdff" (UID: "63a95339-526e-44ef-8b36-ed273250fdff"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.782878 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a544da5-0eee-4277-9637-232bd8ffa9cb-config" (OuterVolumeSpecName: "config") pod "3a544da5-0eee-4277-9637-232bd8ffa9cb" (UID: "3a544da5-0eee-4277-9637-232bd8ffa9cb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.783126 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63a95339-526e-44ef-8b36-ed273250fdff-client-ca" (OuterVolumeSpecName: "client-ca") pod "63a95339-526e-44ef-8b36-ed273250fdff" (UID: "63a95339-526e-44ef-8b36-ed273250fdff"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.789077 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a544da5-0eee-4277-9637-232bd8ffa9cb-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "3a544da5-0eee-4277-9637-232bd8ffa9cb" (UID: "3a544da5-0eee-4277-9637-232bd8ffa9cb"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.789293 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63a95339-526e-44ef-8b36-ed273250fdff-kube-api-access-l5xll" (OuterVolumeSpecName: "kube-api-access-l5xll") pod "63a95339-526e-44ef-8b36-ed273250fdff" (UID: "63a95339-526e-44ef-8b36-ed273250fdff"). InnerVolumeSpecName "kube-api-access-l5xll". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.789439 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a544da5-0eee-4277-9637-232bd8ffa9cb-kube-api-access-gpcsf" (OuterVolumeSpecName: "kube-api-access-gpcsf") pod "3a544da5-0eee-4277-9637-232bd8ffa9cb" (UID: "3a544da5-0eee-4277-9637-232bd8ffa9cb"). InnerVolumeSpecName "kube-api-access-gpcsf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.798265 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63a95339-526e-44ef-8b36-ed273250fdff-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "63a95339-526e-44ef-8b36-ed273250fdff" (UID: "63a95339-526e-44ef-8b36-ed273250fdff"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.882237 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a544da5-0eee-4277-9637-232bd8ffa9cb-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.882297 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5xll\" (UniqueName: \"kubernetes.io/projected/63a95339-526e-44ef-8b36-ed273250fdff-kube-api-access-l5xll\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.882312 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpcsf\" (UniqueName: \"kubernetes.io/projected/3a544da5-0eee-4277-9637-232bd8ffa9cb-kube-api-access-gpcsf\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.882325 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63a95339-526e-44ef-8b36-ed273250fdff-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.882337 4755 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3a544da5-0eee-4277-9637-232bd8ffa9cb-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.882348 4755 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/63a95339-526e-44ef-8b36-ed273250fdff-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.882359 4755 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/63a95339-526e-44ef-8b36-ed273250fdff-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.882371 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a544da5-0eee-4277-9637-232bd8ffa9cb-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:40 crc kubenswrapper[4755]: I0202 22:39:40.882383 4755 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/63a95339-526e-44ef-8b36-ed273250fdff-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.651697 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-cc8d77586-vsqln" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.651701 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.670446 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-cc8d77586-vsqln"] Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.674842 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-cc8d77586-vsqln"] Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.685001 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm"] Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.685050 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-79cb97567b-5nswm"] Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.769874 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-544d98fbb8-gtcl8"] Feb 02 22:39:41 crc kubenswrapper[4755]: E0202 22:39:41.770205 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a544da5-0eee-4277-9637-232bd8ffa9cb" containerName="route-controller-manager" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.770259 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a544da5-0eee-4277-9637-232bd8ffa9cb" containerName="route-controller-manager" Feb 02 22:39:41 crc kubenswrapper[4755]: E0202 22:39:41.770274 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63a95339-526e-44ef-8b36-ed273250fdff" containerName="controller-manager" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.770282 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="63a95339-526e-44ef-8b36-ed273250fdff" containerName="controller-manager" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.770382 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a544da5-0eee-4277-9637-232bd8ffa9cb" containerName="route-controller-manager" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.770394 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="63a95339-526e-44ef-8b36-ed273250fdff" containerName="controller-manager" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.770810 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-544d98fbb8-gtcl8" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.773242 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cd9c86f64-vqvzg"] Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.774204 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-vqvzg" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.778251 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.778483 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.778295 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.778802 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.778323 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.779097 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.779175 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.778402 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.780758 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cd9c86f64-vqvzg"] Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.778440 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.778738 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.779027 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.781208 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.783028 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-544d98fbb8-gtcl8"] Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.783069 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.904778 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m65x6\" (UniqueName: \"kubernetes.io/projected/92abb98c-e0e1-4817-8c37-23c352e61051-kube-api-access-m65x6\") pod \"controller-manager-544d98fbb8-gtcl8\" (UID: \"92abb98c-e0e1-4817-8c37-23c352e61051\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-gtcl8" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.904847 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/92abb98c-e0e1-4817-8c37-23c352e61051-serving-cert\") pod \"controller-manager-544d98fbb8-gtcl8\" (UID: \"92abb98c-e0e1-4817-8c37-23c352e61051\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-gtcl8" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.904869 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92abb98c-e0e1-4817-8c37-23c352e61051-config\") pod \"controller-manager-544d98fbb8-gtcl8\" (UID: \"92abb98c-e0e1-4817-8c37-23c352e61051\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-gtcl8" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.904890 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/92abb98c-e0e1-4817-8c37-23c352e61051-client-ca\") pod \"controller-manager-544d98fbb8-gtcl8\" (UID: \"92abb98c-e0e1-4817-8c37-23c352e61051\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-gtcl8" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.904931 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/92abb98c-e0e1-4817-8c37-23c352e61051-proxy-ca-bundles\") pod \"controller-manager-544d98fbb8-gtcl8\" (UID: \"92abb98c-e0e1-4817-8c37-23c352e61051\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-gtcl8" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.905007 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcwg7\" (UniqueName: \"kubernetes.io/projected/e52605c1-8f8c-43c6-a517-cb92b0a5fb5b-kube-api-access-fcwg7\") pod \"route-controller-manager-cd9c86f64-vqvzg\" (UID: \"e52605c1-8f8c-43c6-a517-cb92b0a5fb5b\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-vqvzg" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.905027 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e52605c1-8f8c-43c6-a517-cb92b0a5fb5b-config\") pod \"route-controller-manager-cd9c86f64-vqvzg\" (UID: \"e52605c1-8f8c-43c6-a517-cb92b0a5fb5b\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-vqvzg" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.905048 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e52605c1-8f8c-43c6-a517-cb92b0a5fb5b-client-ca\") pod \"route-controller-manager-cd9c86f64-vqvzg\" (UID: \"e52605c1-8f8c-43c6-a517-cb92b0a5fb5b\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-vqvzg" Feb 02 22:39:41 crc kubenswrapper[4755]: I0202 22:39:41.905066 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e52605c1-8f8c-43c6-a517-cb92b0a5fb5b-serving-cert\") pod \"route-controller-manager-cd9c86f64-vqvzg\" (UID: \"e52605c1-8f8c-43c6-a517-cb92b0a5fb5b\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-vqvzg" Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.005991 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" 
(UniqueName: \"kubernetes.io/configmap/92abb98c-e0e1-4817-8c37-23c352e61051-proxy-ca-bundles\") pod \"controller-manager-544d98fbb8-gtcl8\" (UID: \"92abb98c-e0e1-4817-8c37-23c352e61051\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-gtcl8" Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.006056 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcwg7\" (UniqueName: \"kubernetes.io/projected/e52605c1-8f8c-43c6-a517-cb92b0a5fb5b-kube-api-access-fcwg7\") pod \"route-controller-manager-cd9c86f64-vqvzg\" (UID: \"e52605c1-8f8c-43c6-a517-cb92b0a5fb5b\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-vqvzg" Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.006079 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e52605c1-8f8c-43c6-a517-cb92b0a5fb5b-config\") pod \"route-controller-manager-cd9c86f64-vqvzg\" (UID: \"e52605c1-8f8c-43c6-a517-cb92b0a5fb5b\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-vqvzg" Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.006100 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e52605c1-8f8c-43c6-a517-cb92b0a5fb5b-client-ca\") pod \"route-controller-manager-cd9c86f64-vqvzg\" (UID: \"e52605c1-8f8c-43c6-a517-cb92b0a5fb5b\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-vqvzg" Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.006118 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e52605c1-8f8c-43c6-a517-cb92b0a5fb5b-serving-cert\") pod \"route-controller-manager-cd9c86f64-vqvzg\" (UID: \"e52605c1-8f8c-43c6-a517-cb92b0a5fb5b\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-vqvzg" Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.006141 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m65x6\" (UniqueName: \"kubernetes.io/projected/92abb98c-e0e1-4817-8c37-23c352e61051-kube-api-access-m65x6\") pod \"controller-manager-544d98fbb8-gtcl8\" (UID: \"92abb98c-e0e1-4817-8c37-23c352e61051\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-gtcl8" Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.006185 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92abb98c-e0e1-4817-8c37-23c352e61051-serving-cert\") pod \"controller-manager-544d98fbb8-gtcl8\" (UID: \"92abb98c-e0e1-4817-8c37-23c352e61051\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-gtcl8" Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.006202 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92abb98c-e0e1-4817-8c37-23c352e61051-config\") pod \"controller-manager-544d98fbb8-gtcl8\" (UID: \"92abb98c-e0e1-4817-8c37-23c352e61051\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-gtcl8" Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.006221 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/92abb98c-e0e1-4817-8c37-23c352e61051-client-ca\") pod \"controller-manager-544d98fbb8-gtcl8\" (UID: 
\"92abb98c-e0e1-4817-8c37-23c352e61051\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-gtcl8" Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.007102 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/92abb98c-e0e1-4817-8c37-23c352e61051-client-ca\") pod \"controller-manager-544d98fbb8-gtcl8\" (UID: \"92abb98c-e0e1-4817-8c37-23c352e61051\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-gtcl8" Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.007167 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/92abb98c-e0e1-4817-8c37-23c352e61051-proxy-ca-bundles\") pod \"controller-manager-544d98fbb8-gtcl8\" (UID: \"92abb98c-e0e1-4817-8c37-23c352e61051\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-gtcl8" Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.007305 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e52605c1-8f8c-43c6-a517-cb92b0a5fb5b-client-ca\") pod \"route-controller-manager-cd9c86f64-vqvzg\" (UID: \"e52605c1-8f8c-43c6-a517-cb92b0a5fb5b\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-vqvzg" Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.007960 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e52605c1-8f8c-43c6-a517-cb92b0a5fb5b-config\") pod \"route-controller-manager-cd9c86f64-vqvzg\" (UID: \"e52605c1-8f8c-43c6-a517-cb92b0a5fb5b\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-vqvzg" Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.007985 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92abb98c-e0e1-4817-8c37-23c352e61051-config\") pod \"controller-manager-544d98fbb8-gtcl8\" (UID: \"92abb98c-e0e1-4817-8c37-23c352e61051\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-gtcl8" Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.010457 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e52605c1-8f8c-43c6-a517-cb92b0a5fb5b-serving-cert\") pod \"route-controller-manager-cd9c86f64-vqvzg\" (UID: \"e52605c1-8f8c-43c6-a517-cb92b0a5fb5b\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-vqvzg" Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.010682 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92abb98c-e0e1-4817-8c37-23c352e61051-serving-cert\") pod \"controller-manager-544d98fbb8-gtcl8\" (UID: \"92abb98c-e0e1-4817-8c37-23c352e61051\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-gtcl8" Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.027344 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m65x6\" (UniqueName: \"kubernetes.io/projected/92abb98c-e0e1-4817-8c37-23c352e61051-kube-api-access-m65x6\") pod \"controller-manager-544d98fbb8-gtcl8\" (UID: \"92abb98c-e0e1-4817-8c37-23c352e61051\") " pod="openshift-controller-manager/controller-manager-544d98fbb8-gtcl8" Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.033536 4755 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-fcwg7\" (UniqueName: \"kubernetes.io/projected/e52605c1-8f8c-43c6-a517-cb92b0a5fb5b-kube-api-access-fcwg7\") pod \"route-controller-manager-cd9c86f64-vqvzg\" (UID: \"e52605c1-8f8c-43c6-a517-cb92b0a5fb5b\") " pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-vqvzg" Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.090833 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-544d98fbb8-gtcl8" Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.098227 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-vqvzg" Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.511059 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-544d98fbb8-gtcl8"] Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.567027 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cd9c86f64-vqvzg"] Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.657220 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-vqvzg" event={"ID":"e52605c1-8f8c-43c6-a517-cb92b0a5fb5b","Type":"ContainerStarted","Data":"5f22f62b0d6a407a8ec3706311075b38fdf7a0a5b49e58dd0ef8ae5e85bb57fb"} Feb 02 22:39:42 crc kubenswrapper[4755]: I0202 22:39:42.658215 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-544d98fbb8-gtcl8" event={"ID":"92abb98c-e0e1-4817-8c37-23c352e61051","Type":"ContainerStarted","Data":"c493ce39f0473ccdf9d37f50aa2d67bc14423e5e60d687e7630c615a18e1e4ed"} Feb 02 22:39:43 crc kubenswrapper[4755]: I0202 22:39:43.075196 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a544da5-0eee-4277-9637-232bd8ffa9cb" path="/var/lib/kubelet/pods/3a544da5-0eee-4277-9637-232bd8ffa9cb/volumes" Feb 02 22:39:43 crc kubenswrapper[4755]: I0202 22:39:43.075841 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63a95339-526e-44ef-8b36-ed273250fdff" path="/var/lib/kubelet/pods/63a95339-526e-44ef-8b36-ed273250fdff/volumes" Feb 02 22:39:43 crc kubenswrapper[4755]: I0202 22:39:43.666449 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-vqvzg" event={"ID":"e52605c1-8f8c-43c6-a517-cb92b0a5fb5b","Type":"ContainerStarted","Data":"2c0e71705ead42e07a0a69461f78915b5a26bbda534edc2ea369059c6694cdf9"} Feb 02 22:39:43 crc kubenswrapper[4755]: I0202 22:39:43.666854 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-vqvzg" Feb 02 22:39:43 crc kubenswrapper[4755]: I0202 22:39:43.668529 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-544d98fbb8-gtcl8" event={"ID":"92abb98c-e0e1-4817-8c37-23c352e61051","Type":"ContainerStarted","Data":"3015928aa7e9a3518679e920fe65f9b917c396aeb67ecea74e05513d8bc330d7"} Feb 02 22:39:43 crc kubenswrapper[4755]: I0202 22:39:43.668900 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-544d98fbb8-gtcl8" Feb 02 22:39:43 crc kubenswrapper[4755]: I0202 22:39:43.674945 4755 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-vqvzg" Feb 02 22:39:43 crc kubenswrapper[4755]: I0202 22:39:43.677814 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-544d98fbb8-gtcl8" Feb 02 22:39:43 crc kubenswrapper[4755]: I0202 22:39:43.685302 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-cd9c86f64-vqvzg" podStartSLOduration=3.685283122 podStartE2EDuration="3.685283122s" podCreationTimestamp="2026-02-02 22:39:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:39:43.682618805 +0000 UTC m=+339.373839181" watchObservedRunningTime="2026-02-02 22:39:43.685283122 +0000 UTC m=+339.376503478" Feb 02 22:39:43 crc kubenswrapper[4755]: I0202 22:39:43.710472 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-544d98fbb8-gtcl8" podStartSLOduration=3.710440446 podStartE2EDuration="3.710440446s" podCreationTimestamp="2026-02-02 22:39:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:39:43.708877151 +0000 UTC m=+339.400097537" watchObservedRunningTime="2026-02-02 22:39:43.710440446 +0000 UTC m=+339.401660812" Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.277717 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9k7fx"] Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.278690 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9k7fx" podUID="2025f58f-a8e1-4009-a95b-946aca049871" containerName="registry-server" containerID="cri-o://b4e4a7206f9adfc2965df1140c3bbb7e5ad975592de4c1b15fd1907ba58d5add" gracePeriod=30 Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.291329 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-964gh"] Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.291986 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-964gh" podUID="83ede4e1-292f-40c0-8e1f-cc44190a0c92" containerName="registry-server" containerID="cri-o://f5293d7deb4b1c941325897664b50e44b1e6016d741c3f25d47daede2b025b5c" gracePeriod=30 Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.314245 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hnz5f"] Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.314559 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f" podUID="467ef27d-8f51-4317-80ee-9071d7024f86" containerName="marketplace-operator" containerID="cri-o://80ea541105c28da2ae11e17fb34aaf04bb86c51a802d6621ddf4954c92bdfaae" gracePeriod=30 Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.327523 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g4db9"] Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.328045 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-g4db9" 
podUID="378f0e3d-577f-4e51-a994-411d062c9fba" containerName="registry-server" containerID="cri-o://c19fc5733662fc0a3b611cc988fcf6be578d23aa4143aeac706b1098bdf6d3cd" gracePeriod=30 Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.342865 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4hn7q"] Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.343259 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4hn7q" podUID="4e73c4d3-3b24-40d0-af22-fbf37ec4716d" containerName="registry-server" containerID="cri-o://5deff95394140bce4d5ccd1cdd4205a7b19b855cee35d71b424737a91bd19b1f" gracePeriod=30 Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.354910 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6spmr"] Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.355907 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6spmr" Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.359966 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6spmr"] Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.455943 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b30941eb-134e-4f00-9501-e8f8f47e9822-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6spmr\" (UID: \"b30941eb-134e-4f00-9501-e8f8f47e9822\") " pod="openshift-marketplace/marketplace-operator-79b997595-6spmr" Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.456019 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bsjw5\" (UniqueName: \"kubernetes.io/projected/b30941eb-134e-4f00-9501-e8f8f47e9822-kube-api-access-bsjw5\") pod \"marketplace-operator-79b997595-6spmr\" (UID: \"b30941eb-134e-4f00-9501-e8f8f47e9822\") " pod="openshift-marketplace/marketplace-operator-79b997595-6spmr" Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.456054 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b30941eb-134e-4f00-9501-e8f8f47e9822-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6spmr\" (UID: \"b30941eb-134e-4f00-9501-e8f8f47e9822\") " pod="openshift-marketplace/marketplace-operator-79b997595-6spmr" Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.557521 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b30941eb-134e-4f00-9501-e8f8f47e9822-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6spmr\" (UID: \"b30941eb-134e-4f00-9501-e8f8f47e9822\") " pod="openshift-marketplace/marketplace-operator-79b997595-6spmr" Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.557571 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bsjw5\" (UniqueName: \"kubernetes.io/projected/b30941eb-134e-4f00-9501-e8f8f47e9822-kube-api-access-bsjw5\") pod \"marketplace-operator-79b997595-6spmr\" (UID: \"b30941eb-134e-4f00-9501-e8f8f47e9822\") " pod="openshift-marketplace/marketplace-operator-79b997595-6spmr" Feb 02 22:39:51 crc 
kubenswrapper[4755]: I0202 22:39:51.557599 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b30941eb-134e-4f00-9501-e8f8f47e9822-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6spmr\" (UID: \"b30941eb-134e-4f00-9501-e8f8f47e9822\") " pod="openshift-marketplace/marketplace-operator-79b997595-6spmr" Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.558682 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b30941eb-134e-4f00-9501-e8f8f47e9822-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6spmr\" (UID: \"b30941eb-134e-4f00-9501-e8f8f47e9822\") " pod="openshift-marketplace/marketplace-operator-79b997595-6spmr" Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.563033 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b30941eb-134e-4f00-9501-e8f8f47e9822-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6spmr\" (UID: \"b30941eb-134e-4f00-9501-e8f8f47e9822\") " pod="openshift-marketplace/marketplace-operator-79b997595-6spmr" Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.582320 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bsjw5\" (UniqueName: \"kubernetes.io/projected/b30941eb-134e-4f00-9501-e8f8f47e9822-kube-api-access-bsjw5\") pod \"marketplace-operator-79b997595-6spmr\" (UID: \"b30941eb-134e-4f00-9501-e8f8f47e9822\") " pod="openshift-marketplace/marketplace-operator-79b997595-6spmr" Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.723384 4755 generic.go:334] "Generic (PLEG): container finished" podID="2025f58f-a8e1-4009-a95b-946aca049871" containerID="b4e4a7206f9adfc2965df1140c3bbb7e5ad975592de4c1b15fd1907ba58d5add" exitCode=0 Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.723460 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9k7fx" event={"ID":"2025f58f-a8e1-4009-a95b-946aca049871","Type":"ContainerDied","Data":"b4e4a7206f9adfc2965df1140c3bbb7e5ad975592de4c1b15fd1907ba58d5add"} Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.726437 4755 generic.go:334] "Generic (PLEG): container finished" podID="83ede4e1-292f-40c0-8e1f-cc44190a0c92" containerID="f5293d7deb4b1c941325897664b50e44b1e6016d741c3f25d47daede2b025b5c" exitCode=0 Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.726633 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-964gh" event={"ID":"83ede4e1-292f-40c0-8e1f-cc44190a0c92","Type":"ContainerDied","Data":"f5293d7deb4b1c941325897664b50e44b1e6016d741c3f25d47daede2b025b5c"} Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.729341 4755 generic.go:334] "Generic (PLEG): container finished" podID="4e73c4d3-3b24-40d0-af22-fbf37ec4716d" containerID="5deff95394140bce4d5ccd1cdd4205a7b19b855cee35d71b424737a91bd19b1f" exitCode=0 Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.729424 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4hn7q" event={"ID":"4e73c4d3-3b24-40d0-af22-fbf37ec4716d","Type":"ContainerDied","Data":"5deff95394140bce4d5ccd1cdd4205a7b19b855cee35d71b424737a91bd19b1f"} Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.731289 4755 generic.go:334] "Generic (PLEG): container finished" 
podID="467ef27d-8f51-4317-80ee-9071d7024f86" containerID="80ea541105c28da2ae11e17fb34aaf04bb86c51a802d6621ddf4954c92bdfaae" exitCode=0 Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.731372 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f" event={"ID":"467ef27d-8f51-4317-80ee-9071d7024f86","Type":"ContainerDied","Data":"80ea541105c28da2ae11e17fb34aaf04bb86c51a802d6621ddf4954c92bdfaae"} Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.731412 4755 scope.go:117] "RemoveContainer" containerID="3a7e23b649f364d0d3ee753c085c1a297d22374e6a1c7c434e47a67c95325cd8" Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.740127 4755 generic.go:334] "Generic (PLEG): container finished" podID="378f0e3d-577f-4e51-a994-411d062c9fba" containerID="c19fc5733662fc0a3b611cc988fcf6be578d23aa4143aeac706b1098bdf6d3cd" exitCode=0 Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.740195 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g4db9" event={"ID":"378f0e3d-577f-4e51-a994-411d062c9fba","Type":"ContainerDied","Data":"c19fc5733662fc0a3b611cc988fcf6be578d23aa4143aeac706b1098bdf6d3cd"} Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.754149 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6spmr" Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.826419 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9k7fx" Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.962473 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2wk4p\" (UniqueName: \"kubernetes.io/projected/2025f58f-a8e1-4009-a95b-946aca049871-kube-api-access-2wk4p\") pod \"2025f58f-a8e1-4009-a95b-946aca049871\" (UID: \"2025f58f-a8e1-4009-a95b-946aca049871\") " Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.962539 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2025f58f-a8e1-4009-a95b-946aca049871-utilities\") pod \"2025f58f-a8e1-4009-a95b-946aca049871\" (UID: \"2025f58f-a8e1-4009-a95b-946aca049871\") " Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.962573 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2025f58f-a8e1-4009-a95b-946aca049871-catalog-content\") pod \"2025f58f-a8e1-4009-a95b-946aca049871\" (UID: \"2025f58f-a8e1-4009-a95b-946aca049871\") " Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.963653 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2025f58f-a8e1-4009-a95b-946aca049871-utilities" (OuterVolumeSpecName: "utilities") pod "2025f58f-a8e1-4009-a95b-946aca049871" (UID: "2025f58f-a8e1-4009-a95b-946aca049871"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:39:51 crc kubenswrapper[4755]: I0202 22:39:51.974942 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2025f58f-a8e1-4009-a95b-946aca049871-kube-api-access-2wk4p" (OuterVolumeSpecName: "kube-api-access-2wk4p") pod "2025f58f-a8e1-4009-a95b-946aca049871" (UID: "2025f58f-a8e1-4009-a95b-946aca049871"). InnerVolumeSpecName "kube-api-access-2wk4p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.038542 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-964gh" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.042865 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g4db9" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.063583 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2025f58f-a8e1-4009-a95b-946aca049871-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.063609 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2wk4p\" (UniqueName: \"kubernetes.io/projected/2025f58f-a8e1-4009-a95b-946aca049871-kube-api-access-2wk4p\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.064328 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4hn7q" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.065189 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2025f58f-a8e1-4009-a95b-946aca049871-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2025f58f-a8e1-4009-a95b-946aca049871" (UID: "2025f58f-a8e1-4009-a95b-946aca049871"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.066564 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.164452 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2b6qc\" (UniqueName: \"kubernetes.io/projected/4e73c4d3-3b24-40d0-af22-fbf37ec4716d-kube-api-access-2b6qc\") pod \"4e73c4d3-3b24-40d0-af22-fbf37ec4716d\" (UID: \"4e73c4d3-3b24-40d0-af22-fbf37ec4716d\") " Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.164504 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dl6rv\" (UniqueName: \"kubernetes.io/projected/467ef27d-8f51-4317-80ee-9071d7024f86-kube-api-access-dl6rv\") pod \"467ef27d-8f51-4317-80ee-9071d7024f86\" (UID: \"467ef27d-8f51-4317-80ee-9071d7024f86\") " Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.164543 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/467ef27d-8f51-4317-80ee-9071d7024f86-marketplace-trusted-ca\") pod \"467ef27d-8f51-4317-80ee-9071d7024f86\" (UID: \"467ef27d-8f51-4317-80ee-9071d7024f86\") " Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.164583 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/378f0e3d-577f-4e51-a994-411d062c9fba-utilities\") pod \"378f0e3d-577f-4e51-a994-411d062c9fba\" (UID: \"378f0e3d-577f-4e51-a994-411d062c9fba\") " Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.164635 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/467ef27d-8f51-4317-80ee-9071d7024f86-marketplace-operator-metrics\") pod \"467ef27d-8f51-4317-80ee-9071d7024f86\" (UID: \"467ef27d-8f51-4317-80ee-9071d7024f86\") " Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.164672 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qpn7j\" (UniqueName: \"kubernetes.io/projected/83ede4e1-292f-40c0-8e1f-cc44190a0c92-kube-api-access-qpn7j\") pod \"83ede4e1-292f-40c0-8e1f-cc44190a0c92\" (UID: \"83ede4e1-292f-40c0-8e1f-cc44190a0c92\") " Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.164713 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pdbft\" (UniqueName: \"kubernetes.io/projected/378f0e3d-577f-4e51-a994-411d062c9fba-kube-api-access-pdbft\") pod \"378f0e3d-577f-4e51-a994-411d062c9fba\" (UID: \"378f0e3d-577f-4e51-a994-411d062c9fba\") " Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.164780 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e73c4d3-3b24-40d0-af22-fbf37ec4716d-utilities\") pod \"4e73c4d3-3b24-40d0-af22-fbf37ec4716d\" (UID: \"4e73c4d3-3b24-40d0-af22-fbf37ec4716d\") " Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.164826 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83ede4e1-292f-40c0-8e1f-cc44190a0c92-catalog-content\") pod \"83ede4e1-292f-40c0-8e1f-cc44190a0c92\" (UID: \"83ede4e1-292f-40c0-8e1f-cc44190a0c92\") " Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.164868 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/378f0e3d-577f-4e51-a994-411d062c9fba-catalog-content\") pod \"378f0e3d-577f-4e51-a994-411d062c9fba\" (UID: \"378f0e3d-577f-4e51-a994-411d062c9fba\") " Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.164902 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e73c4d3-3b24-40d0-af22-fbf37ec4716d-catalog-content\") pod \"4e73c4d3-3b24-40d0-af22-fbf37ec4716d\" (UID: \"4e73c4d3-3b24-40d0-af22-fbf37ec4716d\") " Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.164932 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83ede4e1-292f-40c0-8e1f-cc44190a0c92-utilities\") pod \"83ede4e1-292f-40c0-8e1f-cc44190a0c92\" (UID: \"83ede4e1-292f-40c0-8e1f-cc44190a0c92\") " Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.165209 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2025f58f-a8e1-4009-a95b-946aca049871-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.165844 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83ede4e1-292f-40c0-8e1f-cc44190a0c92-utilities" (OuterVolumeSpecName: "utilities") pod "83ede4e1-292f-40c0-8e1f-cc44190a0c92" (UID: "83ede4e1-292f-40c0-8e1f-cc44190a0c92"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.167126 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/467ef27d-8f51-4317-80ee-9071d7024f86-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "467ef27d-8f51-4317-80ee-9071d7024f86" (UID: "467ef27d-8f51-4317-80ee-9071d7024f86"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.167230 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/378f0e3d-577f-4e51-a994-411d062c9fba-utilities" (OuterVolumeSpecName: "utilities") pod "378f0e3d-577f-4e51-a994-411d062c9fba" (UID: "378f0e3d-577f-4e51-a994-411d062c9fba"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.168027 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e73c4d3-3b24-40d0-af22-fbf37ec4716d-utilities" (OuterVolumeSpecName: "utilities") pod "4e73c4d3-3b24-40d0-af22-fbf37ec4716d" (UID: "4e73c4d3-3b24-40d0-af22-fbf37ec4716d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.169294 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83ede4e1-292f-40c0-8e1f-cc44190a0c92-kube-api-access-qpn7j" (OuterVolumeSpecName: "kube-api-access-qpn7j") pod "83ede4e1-292f-40c0-8e1f-cc44190a0c92" (UID: "83ede4e1-292f-40c0-8e1f-cc44190a0c92"). InnerVolumeSpecName "kube-api-access-qpn7j". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.170591 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/467ef27d-8f51-4317-80ee-9071d7024f86-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "467ef27d-8f51-4317-80ee-9071d7024f86" (UID: "467ef27d-8f51-4317-80ee-9071d7024f86"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.170626 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e73c4d3-3b24-40d0-af22-fbf37ec4716d-kube-api-access-2b6qc" (OuterVolumeSpecName: "kube-api-access-2b6qc") pod "4e73c4d3-3b24-40d0-af22-fbf37ec4716d" (UID: "4e73c4d3-3b24-40d0-af22-fbf37ec4716d"). InnerVolumeSpecName "kube-api-access-2b6qc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.171118 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/378f0e3d-577f-4e51-a994-411d062c9fba-kube-api-access-pdbft" (OuterVolumeSpecName: "kube-api-access-pdbft") pod "378f0e3d-577f-4e51-a994-411d062c9fba" (UID: "378f0e3d-577f-4e51-a994-411d062c9fba"). InnerVolumeSpecName "kube-api-access-pdbft". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.171153 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/467ef27d-8f51-4317-80ee-9071d7024f86-kube-api-access-dl6rv" (OuterVolumeSpecName: "kube-api-access-dl6rv") pod "467ef27d-8f51-4317-80ee-9071d7024f86" (UID: "467ef27d-8f51-4317-80ee-9071d7024f86"). InnerVolumeSpecName "kube-api-access-dl6rv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.188823 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/378f0e3d-577f-4e51-a994-411d062c9fba-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "378f0e3d-577f-4e51-a994-411d062c9fba" (UID: "378f0e3d-577f-4e51-a994-411d062c9fba"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.230811 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83ede4e1-292f-40c0-8e1f-cc44190a0c92-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "83ede4e1-292f-40c0-8e1f-cc44190a0c92" (UID: "83ede4e1-292f-40c0-8e1f-cc44190a0c92"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.266288 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2b6qc\" (UniqueName: \"kubernetes.io/projected/4e73c4d3-3b24-40d0-af22-fbf37ec4716d-kube-api-access-2b6qc\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.266316 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dl6rv\" (UniqueName: \"kubernetes.io/projected/467ef27d-8f51-4317-80ee-9071d7024f86-kube-api-access-dl6rv\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.266325 4755 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/467ef27d-8f51-4317-80ee-9071d7024f86-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.266334 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/378f0e3d-577f-4e51-a994-411d062c9fba-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.266344 4755 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/467ef27d-8f51-4317-80ee-9071d7024f86-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.266353 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qpn7j\" (UniqueName: \"kubernetes.io/projected/83ede4e1-292f-40c0-8e1f-cc44190a0c92-kube-api-access-qpn7j\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.266362 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pdbft\" (UniqueName: \"kubernetes.io/projected/378f0e3d-577f-4e51-a994-411d062c9fba-kube-api-access-pdbft\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.266370 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/4e73c4d3-3b24-40d0-af22-fbf37ec4716d-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.266378 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83ede4e1-292f-40c0-8e1f-cc44190a0c92-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.266386 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/378f0e3d-577f-4e51-a994-411d062c9fba-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.266393 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83ede4e1-292f-40c0-8e1f-cc44190a0c92-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.290359 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e73c4d3-3b24-40d0-af22-fbf37ec4716d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4e73c4d3-3b24-40d0-af22-fbf37ec4716d" (UID: "4e73c4d3-3b24-40d0-af22-fbf37ec4716d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.328429 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6spmr"] Feb 02 22:39:52 crc kubenswrapper[4755]: W0202 22:39:52.331527 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb30941eb_134e_4f00_9501_e8f8f47e9822.slice/crio-54a0082cb3d4d0763a61b5e5e0ced1a549273e6690219a916462ee1718f0ad9b WatchSource:0}: Error finding container 54a0082cb3d4d0763a61b5e5e0ced1a549273e6690219a916462ee1718f0ad9b: Status 404 returned error can't find the container with id 54a0082cb3d4d0763a61b5e5e0ced1a549273e6690219a916462ee1718f0ad9b Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.367859 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e73c4d3-3b24-40d0-af22-fbf37ec4716d-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.749898 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g4db9" event={"ID":"378f0e3d-577f-4e51-a994-411d062c9fba","Type":"ContainerDied","Data":"0dd180a935804a51afc53dee2e40f356608f8c0b156118e5ccca9c6e04bc0543"} Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.749910 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g4db9" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.749958 4755 scope.go:117] "RemoveContainer" containerID="c19fc5733662fc0a3b611cc988fcf6be578d23aa4143aeac706b1098bdf6d3cd" Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.752834 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9k7fx" event={"ID":"2025f58f-a8e1-4009-a95b-946aca049871","Type":"ContainerDied","Data":"ddaeb369969862a5b15ff86f6f118f6b1cce7863e8263909644e7b6f7c5f133a"} Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.752882 4755 util.go:48] "No ready sandbox for pod can be found. 
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.757046 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-964gh" event={"ID":"83ede4e1-292f-40c0-8e1f-cc44190a0c92","Type":"ContainerDied","Data":"d2da6b577849fcebe94ac394c7b98fb662318620008ef244953017628aa35fe8"}
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.757137 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-964gh"
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.762371 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4hn7q"
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.762523 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4hn7q" event={"ID":"4e73c4d3-3b24-40d0-af22-fbf37ec4716d","Type":"ContainerDied","Data":"4da64895c9a0304f263668d1410609e9972235f66c08062307a32661e389a02a"}
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.766512 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6spmr" event={"ID":"b30941eb-134e-4f00-9501-e8f8f47e9822","Type":"ContainerStarted","Data":"376459f1f6ad9444d59eb4f7c387572280e0256c8dcdcaae2e7a862a3f2fdabf"}
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.766619 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6spmr" event={"ID":"b30941eb-134e-4f00-9501-e8f8f47e9822","Type":"ContainerStarted","Data":"54a0082cb3d4d0763a61b5e5e0ced1a549273e6690219a916462ee1718f0ad9b"}
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.767701 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-6spmr"
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.772309 4755 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-6spmr container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.70:8080/healthz\": dial tcp 10.217.0.70:8080: connect: connection refused" start-of-body=
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.772356 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-6spmr" podUID="b30941eb-134e-4f00-9501-e8f8f47e9822" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.70:8080/healthz\": dial tcp 10.217.0.70:8080: connect: connection refused"
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.772414 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f" event={"ID":"467ef27d-8f51-4317-80ee-9071d7024f86","Type":"ContainerDied","Data":"f7648f6029f3757bd1dbb72c51d40345d84b104d001ec656ba1ee6ef7b1afd7c"}
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.772442 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hnz5f"
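The readiness failure above is the usual race on a freshly started container: the kubelet probes http://10.217.0.70:8080/healthz before the process is listening, gets "connection refused", and the pod stays NotReady until a later probe succeeds (status="ready" appears at 22:39:53.797310). A rough sketch of what such an HTTP probe boils down to, with the endpoint and timeout taken from the log rather than any real pod spec (illustrative, not the kubelet's prober code):

    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    // probeOnce treats any transport error (e.g. "connect: connection refused")
    // or a status outside 200-399 as a probe failure, mirroring HTTP-probe semantics.
    func probeOnce(url string) error {
        client := &http.Client{Timeout: 1 * time.Second}
        resp, err := client.Get(url)
        if err != nil {
            return err // e.g. dial tcp 10.217.0.70:8080: connect: connection refused
        }
        defer resp.Body.Close()
        if resp.StatusCode < 200 || resp.StatusCode >= 400 {
            return fmt.Errorf("unexpected status %d", resp.StatusCode)
        }
        return nil
    }

    func main() {
        if err := probeOnce("http://10.217.0.70:8080/healthz"); err != nil {
            fmt.Println("Probe failed:", err) // pod stays NotReady until a success
        }
    }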
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.786949 4755 scope.go:117] "RemoveContainer" containerID="4d141543f4d2f096c49e2f69ae029b778536d934e88b6f09df0da6cd658ec22d"
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.805146 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-6spmr" podStartSLOduration=1.8051142100000002 podStartE2EDuration="1.80511421s" podCreationTimestamp="2026-02-02 22:39:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:39:52.792415294 +0000 UTC m=+348.483635630" watchObservedRunningTime="2026-02-02 22:39:52.80511421 +0000 UTC m=+348.496334566"
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.824358 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-964gh"]
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.833831 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-964gh"]
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.834635 4755 scope.go:117] "RemoveContainer" containerID="dac9dbeb132aa3573b6030831983ebc2a5e95cc337c40c5e321db6c7bc73887e"
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.839158 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g4db9"]
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.843778 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-g4db9"]
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.851990 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9k7fx"]
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.856300 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9k7fx"]
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.859118 4755 scope.go:117] "RemoveContainer" containerID="b4e4a7206f9adfc2965df1140c3bbb7e5ad975592de4c1b15fd1907ba58d5add"
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.860183 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4hn7q"]
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.865136 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4hn7q"]
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.867784 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hnz5f"]
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.870390 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hnz5f"]
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.876658 4755 scope.go:117] "RemoveContainer" containerID="7b958054b761b58b833bc12aee02d771e474703923516043a6adb0416bc780a6"
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.890379 4755 scope.go:117] "RemoveContainer" containerID="7af566c722005b267b5f9e76e05151dc64577f6d9ea516fe91f160b3e33893b8"
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.901579 4755 scope.go:117] "RemoveContainer" containerID="f5293d7deb4b1c941325897664b50e44b1e6016d741c3f25d47daede2b025b5c"
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.916779 4755 scope.go:117] "RemoveContainer" containerID="040f52e1e9f1cf06abd6637c3f09f736e4982d0d1ba26e5505d6ae9de5f987cb"
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.928977 4755 scope.go:117] "RemoveContainer" containerID="4400763e753b41b7f672fb1b3cf7d232596c6b13075a837d4732685954a8c912"
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.938616 4755 scope.go:117] "RemoveContainer" containerID="5deff95394140bce4d5ccd1cdd4205a7b19b855cee35d71b424737a91bd19b1f"
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.952339 4755 scope.go:117] "RemoveContainer" containerID="f8174a16253f429291d86dfd79669a2201a09e22a9ba6533f0fbc1e237b753de"
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.972071 4755 scope.go:117] "RemoveContainer" containerID="f862110e464467891a7975a0b24312c73f15d43d7661d282a379c3ac64168a62"
Feb 02 22:39:52 crc kubenswrapper[4755]: I0202 22:39:52.989691 4755 scope.go:117] "RemoveContainer" containerID="80ea541105c28da2ae11e17fb34aaf04bb86c51a802d6621ddf4954c92bdfaae"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.082212 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2025f58f-a8e1-4009-a95b-946aca049871" path="/var/lib/kubelet/pods/2025f58f-a8e1-4009-a95b-946aca049871/volumes"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.083397 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="378f0e3d-577f-4e51-a994-411d062c9fba" path="/var/lib/kubelet/pods/378f0e3d-577f-4e51-a994-411d062c9fba/volumes"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.084627 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="467ef27d-8f51-4317-80ee-9071d7024f86" path="/var/lib/kubelet/pods/467ef27d-8f51-4317-80ee-9071d7024f86/volumes"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.086464 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e73c4d3-3b24-40d0-af22-fbf37ec4716d" path="/var/lib/kubelet/pods/4e73c4d3-3b24-40d0-af22-fbf37ec4716d/volumes"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.087609 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83ede4e1-292f-40c0-8e1f-cc44190a0c92" path="/var/lib/kubelet/pods/83ede4e1-292f-40c0-8e1f-cc44190a0c92/volumes"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.298621 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gwz27"]
Feb 02 22:39:53 crc kubenswrapper[4755]: E0202 22:39:53.299499 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83ede4e1-292f-40c0-8e1f-cc44190a0c92" containerName="extract-content"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.299528 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="83ede4e1-292f-40c0-8e1f-cc44190a0c92" containerName="extract-content"
Feb 02 22:39:53 crc kubenswrapper[4755]: E0202 22:39:53.299586 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2025f58f-a8e1-4009-a95b-946aca049871" containerName="extract-content"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.299601 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2025f58f-a8e1-4009-a95b-946aca049871" containerName="extract-content"
Feb 02 22:39:53 crc kubenswrapper[4755]: E0202 22:39:53.299618 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="467ef27d-8f51-4317-80ee-9071d7024f86" containerName="marketplace-operator"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.299670 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="467ef27d-8f51-4317-80ee-9071d7024f86" containerName="marketplace-operator"
Feb 02 22:39:53 crc kubenswrapper[4755]: E0202 22:39:53.299690 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83ede4e1-292f-40c0-8e1f-cc44190a0c92" containerName="registry-server"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.299704 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="83ede4e1-292f-40c0-8e1f-cc44190a0c92" containerName="registry-server"
Feb 02 22:39:53 crc kubenswrapper[4755]: E0202 22:39:53.299774 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2025f58f-a8e1-4009-a95b-946aca049871" containerName="registry-server"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.299790 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2025f58f-a8e1-4009-a95b-946aca049871" containerName="registry-server"
Feb 02 22:39:53 crc kubenswrapper[4755]: E0202 22:39:53.299815 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="378f0e3d-577f-4e51-a994-411d062c9fba" containerName="registry-server"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.299865 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="378f0e3d-577f-4e51-a994-411d062c9fba" containerName="registry-server"
Feb 02 22:39:53 crc kubenswrapper[4755]: E0202 22:39:53.299885 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e73c4d3-3b24-40d0-af22-fbf37ec4716d" containerName="extract-utilities"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.299898 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e73c4d3-3b24-40d0-af22-fbf37ec4716d" containerName="extract-utilities"
Feb 02 22:39:53 crc kubenswrapper[4755]: E0202 22:39:53.299912 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e73c4d3-3b24-40d0-af22-fbf37ec4716d" containerName="extract-content"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.299964 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e73c4d3-3b24-40d0-af22-fbf37ec4716d" containerName="extract-content"
Feb 02 22:39:53 crc kubenswrapper[4755]: E0202 22:39:53.299983 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="467ef27d-8f51-4317-80ee-9071d7024f86" containerName="marketplace-operator"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.299995 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="467ef27d-8f51-4317-80ee-9071d7024f86" containerName="marketplace-operator"
Feb 02 22:39:53 crc kubenswrapper[4755]: E0202 22:39:53.300014 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e73c4d3-3b24-40d0-af22-fbf37ec4716d" containerName="registry-server"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.300064 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e73c4d3-3b24-40d0-af22-fbf37ec4716d" containerName="registry-server"
Feb 02 22:39:53 crc kubenswrapper[4755]: E0202 22:39:53.300081 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83ede4e1-292f-40c0-8e1f-cc44190a0c92" containerName="extract-utilities"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.300094 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="83ede4e1-292f-40c0-8e1f-cc44190a0c92" containerName="extract-utilities"
Feb 02 22:39:53 crc kubenswrapper[4755]: E0202 22:39:53.300175 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="378f0e3d-577f-4e51-a994-411d062c9fba" containerName="extract-content"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.300190 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="378f0e3d-577f-4e51-a994-411d062c9fba" containerName="extract-content"
Feb 02 22:39:53 crc kubenswrapper[4755]: E0202 22:39:53.300205 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2025f58f-a8e1-4009-a95b-946aca049871" containerName="extract-utilities"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.300219 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2025f58f-a8e1-4009-a95b-946aca049871" containerName="extract-utilities"
Feb 02 22:39:53 crc kubenswrapper[4755]: E0202 22:39:53.300279 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="378f0e3d-577f-4e51-a994-411d062c9fba" containerName="extract-utilities"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.300294 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="378f0e3d-577f-4e51-a994-411d062c9fba" containerName="extract-utilities"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.300566 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="83ede4e1-292f-40c0-8e1f-cc44190a0c92" containerName="registry-server"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.300593 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="378f0e3d-577f-4e51-a994-411d062c9fba" containerName="registry-server"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.300645 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="2025f58f-a8e1-4009-a95b-946aca049871" containerName="registry-server"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.300661 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="467ef27d-8f51-4317-80ee-9071d7024f86" containerName="marketplace-operator"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.300680 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="467ef27d-8f51-4317-80ee-9071d7024f86" containerName="marketplace-operator"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.300771 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e73c4d3-3b24-40d0-af22-fbf37ec4716d" containerName="registry-server"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.303489 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gwz27"
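When the new certified-operators-gwz27 pod is admitted, the cpu_manager and memory_manager entries above show resource-manager state for the just-deleted pods being purged first. A sketch of that stale-state cleanup pattern, assuming assignments are keyed by (podUID, containerName) and compared against the set of still-active pods (illustrative types, not the kubelet's):

    package main

    import "fmt"

    type key struct{ podUID, container string }

    // removeStaleState drops every assignment whose pod no longer exists,
    // mirroring the "RemoveStaleState" / "Deleted CPUSet assignment" pairs above.
    func removeStaleState(assignments map[key][]int, activePods map[string]bool) {
        for k := range assignments { // deleting during range is safe in Go
            if !activePods[k.podUID] {
                fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n",
                    k.podUID, k.container)
                delete(assignments, k)
            }
        }
    }

    func main() {
        assignments := map[key][]int{
            {"83ede4e1-292f-40c0-8e1f-cc44190a0c92", "registry-server"}: {0, 1},
        }
        removeStaleState(assignments, map[string]bool{}) // the old pod is gone
    }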
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.305169 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gwz27"]
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.307520 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.389815 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.390383 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.480458 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxtrm\" (UniqueName: \"kubernetes.io/projected/31e5d7ec-069a-4def-b27c-8c8418f98ce4-kube-api-access-nxtrm\") pod \"certified-operators-gwz27\" (UID: \"31e5d7ec-069a-4def-b27c-8c8418f98ce4\") " pod="openshift-marketplace/certified-operators-gwz27"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.480551 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31e5d7ec-069a-4def-b27c-8c8418f98ce4-utilities\") pod \"certified-operators-gwz27\" (UID: \"31e5d7ec-069a-4def-b27c-8c8418f98ce4\") " pod="openshift-marketplace/certified-operators-gwz27"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.480580 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31e5d7ec-069a-4def-b27c-8c8418f98ce4-catalog-content\") pod \"certified-operators-gwz27\" (UID: \"31e5d7ec-069a-4def-b27c-8c8418f98ce4\") " pod="openshift-marketplace/certified-operators-gwz27"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.581869 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxtrm\" (UniqueName: \"kubernetes.io/projected/31e5d7ec-069a-4def-b27c-8c8418f98ce4-kube-api-access-nxtrm\") pod \"certified-operators-gwz27\" (UID: \"31e5d7ec-069a-4def-b27c-8c8418f98ce4\") " pod="openshift-marketplace/certified-operators-gwz27"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.581934 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31e5d7ec-069a-4def-b27c-8c8418f98ce4-utilities\") pod \"certified-operators-gwz27\" (UID: \"31e5d7ec-069a-4def-b27c-8c8418f98ce4\") " pod="openshift-marketplace/certified-operators-gwz27"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.581972 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31e5d7ec-069a-4def-b27c-8c8418f98ce4-catalog-content\") pod \"certified-operators-gwz27\" (UID: \"31e5d7ec-069a-4def-b27c-8c8418f98ce4\") " pod="openshift-marketplace/certified-operators-gwz27"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.582507 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31e5d7ec-069a-4def-b27c-8c8418f98ce4-catalog-content\") pod \"certified-operators-gwz27\" (UID: \"31e5d7ec-069a-4def-b27c-8c8418f98ce4\") " pod="openshift-marketplace/certified-operators-gwz27"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.582642 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31e5d7ec-069a-4def-b27c-8c8418f98ce4-utilities\") pod \"certified-operators-gwz27\" (UID: \"31e5d7ec-069a-4def-b27c-8c8418f98ce4\") " pod="openshift-marketplace/certified-operators-gwz27"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.600254 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxtrm\" (UniqueName: \"kubernetes.io/projected/31e5d7ec-069a-4def-b27c-8c8418f98ce4-kube-api-access-nxtrm\") pod \"certified-operators-gwz27\" (UID: \"31e5d7ec-069a-4def-b27c-8c8418f98ce4\") " pod="openshift-marketplace/certified-operators-gwz27"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.664818 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gwz27"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.797310 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-6spmr"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.883907 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2mpx7"]
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.884838 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2mpx7"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.886806 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.900778 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2mpx7"]
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.988316 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9db9f5df-c29a-49c2-9130-f5066595eb43-utilities\") pod \"redhat-marketplace-2mpx7\" (UID: \"9db9f5df-c29a-49c2-9130-f5066595eb43\") " pod="openshift-marketplace/redhat-marketplace-2mpx7"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.988373 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vz7mf\" (UniqueName: \"kubernetes.io/projected/9db9f5df-c29a-49c2-9130-f5066595eb43-kube-api-access-vz7mf\") pod \"redhat-marketplace-2mpx7\" (UID: \"9db9f5df-c29a-49c2-9130-f5066595eb43\") " pod="openshift-marketplace/redhat-marketplace-2mpx7"
Feb 02 22:39:53 crc kubenswrapper[4755]: I0202 22:39:53.988414 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9db9f5df-c29a-49c2-9130-f5066595eb43-catalog-content\") pod \"redhat-marketplace-2mpx7\" (UID: \"9db9f5df-c29a-49c2-9130-f5066595eb43\") " pod="openshift-marketplace/redhat-marketplace-2mpx7"
Feb 02 22:39:54 crc kubenswrapper[4755]: I0202 22:39:54.090260 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9db9f5df-c29a-49c2-9130-f5066595eb43-utilities\") pod \"redhat-marketplace-2mpx7\" (UID: \"9db9f5df-c29a-49c2-9130-f5066595eb43\") " pod="openshift-marketplace/redhat-marketplace-2mpx7"
Feb 02 22:39:54 crc kubenswrapper[4755]: I0202 22:39:54.090337 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vz7mf\" (UniqueName: \"kubernetes.io/projected/9db9f5df-c29a-49c2-9130-f5066595eb43-kube-api-access-vz7mf\") pod \"redhat-marketplace-2mpx7\" (UID: \"9db9f5df-c29a-49c2-9130-f5066595eb43\") " pod="openshift-marketplace/redhat-marketplace-2mpx7"
Feb 02 22:39:54 crc kubenswrapper[4755]: I0202 22:39:54.090401 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9db9f5df-c29a-49c2-9130-f5066595eb43-catalog-content\") pod \"redhat-marketplace-2mpx7\" (UID: \"9db9f5df-c29a-49c2-9130-f5066595eb43\") " pod="openshift-marketplace/redhat-marketplace-2mpx7"
Feb 02 22:39:54 crc kubenswrapper[4755]: I0202 22:39:54.090931 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9db9f5df-c29a-49c2-9130-f5066595eb43-utilities\") pod \"redhat-marketplace-2mpx7\" (UID: \"9db9f5df-c29a-49c2-9130-f5066595eb43\") " pod="openshift-marketplace/redhat-marketplace-2mpx7"
Feb 02 22:39:54 crc kubenswrapper[4755]: I0202 22:39:54.091090 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9db9f5df-c29a-49c2-9130-f5066595eb43-catalog-content\") pod \"redhat-marketplace-2mpx7\" (UID: \"9db9f5df-c29a-49c2-9130-f5066595eb43\") " pod="openshift-marketplace/redhat-marketplace-2mpx7"
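The volume-mount side of pod startup is visible above for redhat-marketplace-2mpx7: each declared volume goes through "VerifyControllerAttachedVolume started", then "MountVolume started", then "MountVolume.SetUp succeeded", and only once every volume is set up does the "No sandbox for pod can be found" path create the sandbox. A compact sketch of that ordering, with hypothetical helper names:

    package main

    import "fmt"

    // setUpVolumes walks the three phases the reconciler logs per volume;
    // sandbox creation waits until every SetUp has succeeded.
    func setUpVolumes(pod string, volumes []string) {
        for _, v := range volumes {
            fmt.Printf("VerifyControllerAttachedVolume started for volume %q pod=%q\n", v, pod)
        }
        for _, v := range volumes {
            fmt.Printf("MountVolume started for volume %q pod=%q\n", v, pod)
            fmt.Printf("MountVolume.SetUp succeeded for volume %q pod=%q\n", v, pod)
        }
        fmt.Printf("all volumes ready; sandbox for %q can be started\n", pod)
    }

    func main() {
        setUpVolumes("openshift-marketplace/redhat-marketplace-2mpx7",
            []string{"utilities", "kube-api-access-vz7mf", "catalog-content"})
    }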
\"9db9f5df-c29a-49c2-9130-f5066595eb43\") " pod="openshift-marketplace/redhat-marketplace-2mpx7" Feb 02 22:39:54 crc kubenswrapper[4755]: I0202 22:39:54.103508 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gwz27"] Feb 02 22:39:54 crc kubenswrapper[4755]: W0202 22:39:54.111323 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod31e5d7ec_069a_4def_b27c_8c8418f98ce4.slice/crio-c9e9a797da9a958d65b9097394cee387937ac824b5462365beb8307ce3576dfa WatchSource:0}: Error finding container c9e9a797da9a958d65b9097394cee387937ac824b5462365beb8307ce3576dfa: Status 404 returned error can't find the container with id c9e9a797da9a958d65b9097394cee387937ac824b5462365beb8307ce3576dfa Feb 02 22:39:54 crc kubenswrapper[4755]: I0202 22:39:54.117655 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vz7mf\" (UniqueName: \"kubernetes.io/projected/9db9f5df-c29a-49c2-9130-f5066595eb43-kube-api-access-vz7mf\") pod \"redhat-marketplace-2mpx7\" (UID: \"9db9f5df-c29a-49c2-9130-f5066595eb43\") " pod="openshift-marketplace/redhat-marketplace-2mpx7" Feb 02 22:39:54 crc kubenswrapper[4755]: I0202 22:39:54.208990 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2mpx7" Feb 02 22:39:54 crc kubenswrapper[4755]: I0202 22:39:54.611493 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2mpx7"] Feb 02 22:39:54 crc kubenswrapper[4755]: I0202 22:39:54.798805 4755 generic.go:334] "Generic (PLEG): container finished" podID="31e5d7ec-069a-4def-b27c-8c8418f98ce4" containerID="0fe61b2f4856e7fbbc79b0a66c88250c1554907769b6679f3e9ddfe81298f435" exitCode=0 Feb 02 22:39:54 crc kubenswrapper[4755]: I0202 22:39:54.798854 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gwz27" event={"ID":"31e5d7ec-069a-4def-b27c-8c8418f98ce4","Type":"ContainerDied","Data":"0fe61b2f4856e7fbbc79b0a66c88250c1554907769b6679f3e9ddfe81298f435"} Feb 02 22:39:54 crc kubenswrapper[4755]: I0202 22:39:54.798903 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gwz27" event={"ID":"31e5d7ec-069a-4def-b27c-8c8418f98ce4","Type":"ContainerStarted","Data":"c9e9a797da9a958d65b9097394cee387937ac824b5462365beb8307ce3576dfa"} Feb 02 22:39:54 crc kubenswrapper[4755]: I0202 22:39:54.802393 4755 generic.go:334] "Generic (PLEG): container finished" podID="9db9f5df-c29a-49c2-9130-f5066595eb43" containerID="866f8ba1459ea753ed9951eda2b869586e37d998201ea066a5d75555bc655b77" exitCode=0 Feb 02 22:39:54 crc kubenswrapper[4755]: I0202 22:39:54.802494 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2mpx7" event={"ID":"9db9f5df-c29a-49c2-9130-f5066595eb43","Type":"ContainerDied","Data":"866f8ba1459ea753ed9951eda2b869586e37d998201ea066a5d75555bc655b77"} Feb 02 22:39:54 crc kubenswrapper[4755]: I0202 22:39:54.802570 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2mpx7" event={"ID":"9db9f5df-c29a-49c2-9130-f5066595eb43","Type":"ContainerStarted","Data":"61f397f75e7ca7fdbc22be8bf8d2e6ec7522b63d8b04322d2d468af41945cf32"} Feb 02 22:39:55 crc kubenswrapper[4755]: I0202 22:39:55.692182 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lrlh4"] Feb 02 22:39:55 
crc kubenswrapper[4755]: I0202 22:39:55.693751 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lrlh4" Feb 02 22:39:55 crc kubenswrapper[4755]: I0202 22:39:55.700089 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 02 22:39:55 crc kubenswrapper[4755]: I0202 22:39:55.713954 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lrlh4"] Feb 02 22:39:55 crc kubenswrapper[4755]: I0202 22:39:55.717280 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e855c69f-498a-4338-b0c2-d5d94116c359-catalog-content\") pod \"redhat-operators-lrlh4\" (UID: \"e855c69f-498a-4338-b0c2-d5d94116c359\") " pod="openshift-marketplace/redhat-operators-lrlh4" Feb 02 22:39:55 crc kubenswrapper[4755]: I0202 22:39:55.717355 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqz5z\" (UniqueName: \"kubernetes.io/projected/e855c69f-498a-4338-b0c2-d5d94116c359-kube-api-access-nqz5z\") pod \"redhat-operators-lrlh4\" (UID: \"e855c69f-498a-4338-b0c2-d5d94116c359\") " pod="openshift-marketplace/redhat-operators-lrlh4" Feb 02 22:39:55 crc kubenswrapper[4755]: I0202 22:39:55.717403 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e855c69f-498a-4338-b0c2-d5d94116c359-utilities\") pod \"redhat-operators-lrlh4\" (UID: \"e855c69f-498a-4338-b0c2-d5d94116c359\") " pod="openshift-marketplace/redhat-operators-lrlh4" Feb 02 22:39:55 crc kubenswrapper[4755]: I0202 22:39:55.813888 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gwz27" event={"ID":"31e5d7ec-069a-4def-b27c-8c8418f98ce4","Type":"ContainerStarted","Data":"0d8dfeb813704fd0e6058fc2ff777f35d4a28267b60cfebd8f343c88658b235d"} Feb 02 22:39:55 crc kubenswrapper[4755]: I0202 22:39:55.817838 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e855c69f-498a-4338-b0c2-d5d94116c359-catalog-content\") pod \"redhat-operators-lrlh4\" (UID: \"e855c69f-498a-4338-b0c2-d5d94116c359\") " pod="openshift-marketplace/redhat-operators-lrlh4" Feb 02 22:39:55 crc kubenswrapper[4755]: I0202 22:39:55.817912 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqz5z\" (UniqueName: \"kubernetes.io/projected/e855c69f-498a-4338-b0c2-d5d94116c359-kube-api-access-nqz5z\") pod \"redhat-operators-lrlh4\" (UID: \"e855c69f-498a-4338-b0c2-d5d94116c359\") " pod="openshift-marketplace/redhat-operators-lrlh4" Feb 02 22:39:55 crc kubenswrapper[4755]: I0202 22:39:55.817939 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e855c69f-498a-4338-b0c2-d5d94116c359-utilities\") pod \"redhat-operators-lrlh4\" (UID: \"e855c69f-498a-4338-b0c2-d5d94116c359\") " pod="openshift-marketplace/redhat-operators-lrlh4" Feb 02 22:39:55 crc kubenswrapper[4755]: I0202 22:39:55.818318 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e855c69f-498a-4338-b0c2-d5d94116c359-utilities\") pod \"redhat-operators-lrlh4\" (UID: 
\"e855c69f-498a-4338-b0c2-d5d94116c359\") " pod="openshift-marketplace/redhat-operators-lrlh4" Feb 02 22:39:55 crc kubenswrapper[4755]: I0202 22:39:55.818821 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e855c69f-498a-4338-b0c2-d5d94116c359-catalog-content\") pod \"redhat-operators-lrlh4\" (UID: \"e855c69f-498a-4338-b0c2-d5d94116c359\") " pod="openshift-marketplace/redhat-operators-lrlh4" Feb 02 22:39:55 crc kubenswrapper[4755]: I0202 22:39:55.842490 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqz5z\" (UniqueName: \"kubernetes.io/projected/e855c69f-498a-4338-b0c2-d5d94116c359-kube-api-access-nqz5z\") pod \"redhat-operators-lrlh4\" (UID: \"e855c69f-498a-4338-b0c2-d5d94116c359\") " pod="openshift-marketplace/redhat-operators-lrlh4" Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.015676 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lrlh4" Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.285425 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-82rwp"] Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.287203 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-82rwp" Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.292198 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.297245 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-82rwp"] Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.323462 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3dbe5a97-2c71-4e7a-b295-53dab6642a1b-catalog-content\") pod \"community-operators-82rwp\" (UID: \"3dbe5a97-2c71-4e7a-b295-53dab6642a1b\") " pod="openshift-marketplace/community-operators-82rwp" Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.323523 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3dbe5a97-2c71-4e7a-b295-53dab6642a1b-utilities\") pod \"community-operators-82rwp\" (UID: \"3dbe5a97-2c71-4e7a-b295-53dab6642a1b\") " pod="openshift-marketplace/community-operators-82rwp" Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.323559 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftkv2\" (UniqueName: \"kubernetes.io/projected/3dbe5a97-2c71-4e7a-b295-53dab6642a1b-kube-api-access-ftkv2\") pod \"community-operators-82rwp\" (UID: \"3dbe5a97-2c71-4e7a-b295-53dab6642a1b\") " pod="openshift-marketplace/community-operators-82rwp" Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.424940 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3dbe5a97-2c71-4e7a-b295-53dab6642a1b-catalog-content\") pod \"community-operators-82rwp\" (UID: \"3dbe5a97-2c71-4e7a-b295-53dab6642a1b\") " pod="openshift-marketplace/community-operators-82rwp" Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.425003 4755 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3dbe5a97-2c71-4e7a-b295-53dab6642a1b-utilities\") pod \"community-operators-82rwp\" (UID: \"3dbe5a97-2c71-4e7a-b295-53dab6642a1b\") " pod="openshift-marketplace/community-operators-82rwp" Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.425049 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftkv2\" (UniqueName: \"kubernetes.io/projected/3dbe5a97-2c71-4e7a-b295-53dab6642a1b-kube-api-access-ftkv2\") pod \"community-operators-82rwp\" (UID: \"3dbe5a97-2c71-4e7a-b295-53dab6642a1b\") " pod="openshift-marketplace/community-operators-82rwp" Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.425479 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3dbe5a97-2c71-4e7a-b295-53dab6642a1b-catalog-content\") pod \"community-operators-82rwp\" (UID: \"3dbe5a97-2c71-4e7a-b295-53dab6642a1b\") " pod="openshift-marketplace/community-operators-82rwp" Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.426223 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3dbe5a97-2c71-4e7a-b295-53dab6642a1b-utilities\") pod \"community-operators-82rwp\" (UID: \"3dbe5a97-2c71-4e7a-b295-53dab6642a1b\") " pod="openshift-marketplace/community-operators-82rwp" Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.441439 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lrlh4"] Feb 02 22:39:56 crc kubenswrapper[4755]: W0202 22:39:56.442589 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode855c69f_498a_4338_b0c2_d5d94116c359.slice/crio-6613b66bad2cff2fd19cd40790d93a286af6d3c47edbd9a5f27d487591c5c1bd WatchSource:0}: Error finding container 6613b66bad2cff2fd19cd40790d93a286af6d3c47edbd9a5f27d487591c5c1bd: Status 404 returned error can't find the container with id 6613b66bad2cff2fd19cd40790d93a286af6d3c47edbd9a5f27d487591c5c1bd Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.447338 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftkv2\" (UniqueName: \"kubernetes.io/projected/3dbe5a97-2c71-4e7a-b295-53dab6642a1b-kube-api-access-ftkv2\") pod \"community-operators-82rwp\" (UID: \"3dbe5a97-2c71-4e7a-b295-53dab6642a1b\") " pod="openshift-marketplace/community-operators-82rwp" Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.609015 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-82rwp" Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.756532 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-7clsl" Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.806161 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ch6t8"] Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.836266 4755 generic.go:334] "Generic (PLEG): container finished" podID="9db9f5df-c29a-49c2-9130-f5066595eb43" containerID="9e0397a1aba5a0a0d7a4d6f69522a85c14d9b83e55d85a6a3f9483adbfbca401" exitCode=0 Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.836408 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2mpx7" event={"ID":"9db9f5df-c29a-49c2-9130-f5066595eb43","Type":"ContainerDied","Data":"9e0397a1aba5a0a0d7a4d6f69522a85c14d9b83e55d85a6a3f9483adbfbca401"} Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.851450 4755 generic.go:334] "Generic (PLEG): container finished" podID="31e5d7ec-069a-4def-b27c-8c8418f98ce4" containerID="0d8dfeb813704fd0e6058fc2ff777f35d4a28267b60cfebd8f343c88658b235d" exitCode=0 Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.851573 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gwz27" event={"ID":"31e5d7ec-069a-4def-b27c-8c8418f98ce4","Type":"ContainerDied","Data":"0d8dfeb813704fd0e6058fc2ff777f35d4a28267b60cfebd8f343c88658b235d"} Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.854896 4755 generic.go:334] "Generic (PLEG): container finished" podID="e855c69f-498a-4338-b0c2-d5d94116c359" containerID="8ed9282632a0f460ce8a60cc0238fde2a695be76aa4ff85ad2c7df9be65425f4" exitCode=0 Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.854946 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lrlh4" event={"ID":"e855c69f-498a-4338-b0c2-d5d94116c359","Type":"ContainerDied","Data":"8ed9282632a0f460ce8a60cc0238fde2a695be76aa4ff85ad2c7df9be65425f4"} Feb 02 22:39:56 crc kubenswrapper[4755]: I0202 22:39:56.854987 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lrlh4" event={"ID":"e855c69f-498a-4338-b0c2-d5d94116c359","Type":"ContainerStarted","Data":"6613b66bad2cff2fd19cd40790d93a286af6d3c47edbd9a5f27d487591c5c1bd"} Feb 02 22:39:57 crc kubenswrapper[4755]: I0202 22:39:57.013408 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-82rwp"] Feb 02 22:39:57 crc kubenswrapper[4755]: W0202 22:39:57.016419 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dbe5a97_2c71_4e7a_b295_53dab6642a1b.slice/crio-c5ce2dc4d456f76ae9dfa4b47c6584d8449241d16dae9073449690de03c452ed WatchSource:0}: Error finding container c5ce2dc4d456f76ae9dfa4b47c6584d8449241d16dae9073449690de03c452ed: Status 404 returned error can't find the container with id c5ce2dc4d456f76ae9dfa4b47c6584d8449241d16dae9073449690de03c452ed Feb 02 22:39:57 crc kubenswrapper[4755]: I0202 22:39:57.862246 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gwz27" event={"ID":"31e5d7ec-069a-4def-b27c-8c8418f98ce4","Type":"ContainerStarted","Data":"c4519b0b1f6f9f1eb5afda4a665235b7f3e53c39f9bee73e08be86795347f172"} Feb 02 
22:39:57 crc kubenswrapper[4755]: I0202 22:39:57.863852 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lrlh4" event={"ID":"e855c69f-498a-4338-b0c2-d5d94116c359","Type":"ContainerStarted","Data":"9e54123397f2647e5f401a11aac8aad8c2788026d7e308bb5f9e288ccd907e86"} Feb 02 22:39:57 crc kubenswrapper[4755]: I0202 22:39:57.868758 4755 generic.go:334] "Generic (PLEG): container finished" podID="3dbe5a97-2c71-4e7a-b295-53dab6642a1b" containerID="5df5ec14096cab0a5e2362e1afc991c6ba0218d389360f8a0cfd299e926c6965" exitCode=0 Feb 02 22:39:57 crc kubenswrapper[4755]: I0202 22:39:57.868789 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-82rwp" event={"ID":"3dbe5a97-2c71-4e7a-b295-53dab6642a1b","Type":"ContainerDied","Data":"5df5ec14096cab0a5e2362e1afc991c6ba0218d389360f8a0cfd299e926c6965"} Feb 02 22:39:57 crc kubenswrapper[4755]: I0202 22:39:57.868832 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-82rwp" event={"ID":"3dbe5a97-2c71-4e7a-b295-53dab6642a1b","Type":"ContainerStarted","Data":"c5ce2dc4d456f76ae9dfa4b47c6584d8449241d16dae9073449690de03c452ed"} Feb 02 22:39:57 crc kubenswrapper[4755]: I0202 22:39:57.870560 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2mpx7" event={"ID":"9db9f5df-c29a-49c2-9130-f5066595eb43","Type":"ContainerStarted","Data":"bacdee406397bb43020a2b92cf39d275bd1f13fcec51fb3f7ed18a6943f09ae0"} Feb 02 22:39:57 crc kubenswrapper[4755]: I0202 22:39:57.880390 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gwz27" podStartSLOduration=2.417289421 podStartE2EDuration="4.880350832s" podCreationTimestamp="2026-02-02 22:39:53 +0000 UTC" firstStartedPulling="2026-02-02 22:39:54.800744479 +0000 UTC m=+350.491964805" lastFinishedPulling="2026-02-02 22:39:57.26380588 +0000 UTC m=+352.955026216" observedRunningTime="2026-02-02 22:39:57.878394426 +0000 UTC m=+353.569614752" watchObservedRunningTime="2026-02-02 22:39:57.880350832 +0000 UTC m=+353.571571158" Feb 02 22:39:57 crc kubenswrapper[4755]: I0202 22:39:57.934659 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2mpx7" podStartSLOduration=2.45717569 podStartE2EDuration="4.934638635s" podCreationTimestamp="2026-02-02 22:39:53 +0000 UTC" firstStartedPulling="2026-02-02 22:39:54.803357034 +0000 UTC m=+350.494577360" lastFinishedPulling="2026-02-02 22:39:57.280819969 +0000 UTC m=+352.972040305" observedRunningTime="2026-02-02 22:39:57.930632529 +0000 UTC m=+353.621852845" watchObservedRunningTime="2026-02-02 22:39:57.934638635 +0000 UTC m=+353.625858971" Feb 02 22:39:58 crc kubenswrapper[4755]: I0202 22:39:58.878663 4755 generic.go:334] "Generic (PLEG): container finished" podID="e855c69f-498a-4338-b0c2-d5d94116c359" containerID="9e54123397f2647e5f401a11aac8aad8c2788026d7e308bb5f9e288ccd907e86" exitCode=0 Feb 02 22:39:58 crc kubenswrapper[4755]: I0202 22:39:58.879625 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lrlh4" event={"ID":"e855c69f-498a-4338-b0c2-d5d94116c359","Type":"ContainerDied","Data":"9e54123397f2647e5f401a11aac8aad8c2788026d7e308bb5f9e288ccd907e86"} Feb 02 22:39:59 crc kubenswrapper[4755]: I0202 22:39:59.887894 4755 generic.go:334] "Generic (PLEG): container finished" podID="3dbe5a97-2c71-4e7a-b295-53dab6642a1b" 
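The "Observed pod startup duration" entries above are internally consistent: podStartSLOduration appears to be the end-to-end startup time minus the image-pull window (lastFinishedPulling - firstStartedPulling), computed on the m=+ monotonic-clock offsets that Go's time.Time string form appends. Checking certified-operators-gwz27 with the logged values (a worked example, not the tracker's actual code):

    package main

    import "fmt"

    func main() {
        e2e := 4.880350832                    // podStartE2EDuration in seconds
        pull := 352.955026216 - 350.491964805 // lastFinishedPulling - firstStartedPulling (m=+ offsets)
        fmt.Printf("SLO ≈ %.9f s\n", e2e-pull) // ≈ 2.417289421, matching podStartSLOduration
    }

The redhat-marketplace-2mpx7 entry checks out the same way: 4.934638635 - (352.972040305 - 350.494577360) = 2.45717569. For marketplace-operator-79b997595-6spmr earlier, both pull timestamps are the zero time, so SLO and E2E durations are equal.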
containerID="9caa476a33d2b9e8085e81677f6ba924e5d45f66c04a8d516d5ed746ef5e669f" exitCode=0 Feb 02 22:39:59 crc kubenswrapper[4755]: I0202 22:39:59.887988 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-82rwp" event={"ID":"3dbe5a97-2c71-4e7a-b295-53dab6642a1b","Type":"ContainerDied","Data":"9caa476a33d2b9e8085e81677f6ba924e5d45f66c04a8d516d5ed746ef5e669f"} Feb 02 22:40:00 crc kubenswrapper[4755]: I0202 22:40:00.896958 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-82rwp" event={"ID":"3dbe5a97-2c71-4e7a-b295-53dab6642a1b","Type":"ContainerStarted","Data":"2b2dec8b1a1aba8ed1436424a084677a5e860c0db7ebbececdabdc292409bf28"} Feb 02 22:40:00 crc kubenswrapper[4755]: I0202 22:40:00.901079 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lrlh4" event={"ID":"e855c69f-498a-4338-b0c2-d5d94116c359","Type":"ContainerStarted","Data":"7e716e05a53fdf4fc430605c115136cd5a7903e99ae8ee7fdfb832d4c537356c"} Feb 02 22:40:00 crc kubenswrapper[4755]: I0202 22:40:00.923444 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-82rwp" podStartSLOduration=2.494867267 podStartE2EDuration="4.923425655s" podCreationTimestamp="2026-02-02 22:39:56 +0000 UTC" firstStartedPulling="2026-02-02 22:39:57.870048286 +0000 UTC m=+353.561268612" lastFinishedPulling="2026-02-02 22:40:00.298606674 +0000 UTC m=+355.989827000" observedRunningTime="2026-02-02 22:40:00.920432699 +0000 UTC m=+356.611653035" watchObservedRunningTime="2026-02-02 22:40:00.923425655 +0000 UTC m=+356.614645991" Feb 02 22:40:03 crc kubenswrapper[4755]: I0202 22:40:03.665294 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gwz27" Feb 02 22:40:03 crc kubenswrapper[4755]: I0202 22:40:03.666265 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gwz27" Feb 02 22:40:03 crc kubenswrapper[4755]: I0202 22:40:03.742345 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gwz27" Feb 02 22:40:03 crc kubenswrapper[4755]: I0202 22:40:03.760250 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lrlh4" podStartSLOduration=5.810112602 podStartE2EDuration="8.760227041s" podCreationTimestamp="2026-02-02 22:39:55 +0000 UTC" firstStartedPulling="2026-02-02 22:39:56.860153753 +0000 UTC m=+352.551374119" lastFinishedPulling="2026-02-02 22:39:59.810268232 +0000 UTC m=+355.501488558" observedRunningTime="2026-02-02 22:40:00.948253679 +0000 UTC m=+356.639474005" watchObservedRunningTime="2026-02-02 22:40:03.760227041 +0000 UTC m=+359.451447377" Feb 02 22:40:03 crc kubenswrapper[4755]: I0202 22:40:03.958819 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gwz27" Feb 02 22:40:04 crc kubenswrapper[4755]: I0202 22:40:04.209513 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2mpx7" Feb 02 22:40:04 crc kubenswrapper[4755]: I0202 22:40:04.209573 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2mpx7" Feb 02 22:40:04 crc kubenswrapper[4755]: I0202 22:40:04.262325 4755 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2mpx7" Feb 02 22:40:04 crc kubenswrapper[4755]: I0202 22:40:04.974366 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2mpx7" Feb 02 22:40:06 crc kubenswrapper[4755]: I0202 22:40:06.016891 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lrlh4" Feb 02 22:40:06 crc kubenswrapper[4755]: I0202 22:40:06.017187 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lrlh4" Feb 02 22:40:06 crc kubenswrapper[4755]: I0202 22:40:06.609422 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-82rwp" Feb 02 22:40:06 crc kubenswrapper[4755]: I0202 22:40:06.609486 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-82rwp" Feb 02 22:40:06 crc kubenswrapper[4755]: I0202 22:40:06.646046 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-82rwp" Feb 02 22:40:06 crc kubenswrapper[4755]: I0202 22:40:06.984534 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-82rwp" Feb 02 22:40:07 crc kubenswrapper[4755]: I0202 22:40:07.077862 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-lrlh4" podUID="e855c69f-498a-4338-b0c2-d5d94116c359" containerName="registry-server" probeResult="failure" output=< Feb 02 22:40:07 crc kubenswrapper[4755]: timeout: failed to connect service ":50051" within 1s Feb 02 22:40:07 crc kubenswrapper[4755]: > Feb 02 22:40:16 crc kubenswrapper[4755]: I0202 22:40:16.087367 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lrlh4" Feb 02 22:40:16 crc kubenswrapper[4755]: I0202 22:40:16.154011 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lrlh4" Feb 02 22:40:21 crc kubenswrapper[4755]: I0202 22:40:21.854332 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" podUID="2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9" containerName="registry" containerID="cri-o://6573d5df5f700f87f9f5cd4e06ba0f332772de175a985adae854394930d921bf" gracePeriod=30 Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.027880 4755 generic.go:334] "Generic (PLEG): container finished" podID="2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9" containerID="6573d5df5f700f87f9f5cd4e06ba0f332772de175a985adae854394930d921bf" exitCode=0 Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.027945 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" event={"ID":"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9","Type":"ContainerDied","Data":"6573d5df5f700f87f9f5cd4e06ba0f332772de175a985adae854394930d921bf"} Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.361050 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.417609 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-bound-sa-token\") pod \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.417678 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-registry-tls\") pod \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.417707 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-trusted-ca\") pod \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.417776 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzbl2\" (UniqueName: \"kubernetes.io/projected/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-kube-api-access-qzbl2\") pod \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.417909 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.417940 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-installation-pull-secrets\") pod \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.418025 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-ca-trust-extracted\") pod \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.418086 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-registry-certificates\") pod \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\" (UID: \"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9\") " Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.418768 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.419470 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.420364 4755 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-registry-certificates\") on node \"crc\" DevicePath \"\"" Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.420406 4755 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.429548 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.430257 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.432471 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-kube-api-access-qzbl2" (OuterVolumeSpecName: "kube-api-access-qzbl2") pod "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9"). InnerVolumeSpecName "kube-api-access-qzbl2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.436150 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.436236 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.447660 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9" (UID: "2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.522143 4755 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.522227 4755 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-registry-tls\") on node \"crc\" DevicePath \"\"" Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.522257 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzbl2\" (UniqueName: \"kubernetes.io/projected/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-kube-api-access-qzbl2\") on node \"crc\" DevicePath \"\"" Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.522290 4755 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Feb 02 22:40:22 crc kubenswrapper[4755]: I0202 22:40:22.522321 4755 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Feb 02 22:40:23 crc kubenswrapper[4755]: I0202 22:40:23.041070 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" event={"ID":"2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9","Type":"ContainerDied","Data":"9147f3a74e215eff869eabd27c19bbe0d62a372dee2bae5c215b17d89425f692"} Feb 02 22:40:23 crc kubenswrapper[4755]: I0202 22:40:23.041177 4755 scope.go:117] "RemoveContainer" containerID="6573d5df5f700f87f9f5cd4e06ba0f332772de175a985adae854394930d921bf" Feb 02 22:40:23 crc kubenswrapper[4755]: I0202 22:40:23.041178 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ch6t8" Feb 02 22:40:23 crc kubenswrapper[4755]: I0202 22:40:23.103240 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ch6t8"] Feb 02 22:40:23 crc kubenswrapper[4755]: I0202 22:40:23.111081 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ch6t8"] Feb 02 22:40:23 crc kubenswrapper[4755]: I0202 22:40:23.389804 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 22:40:23 crc kubenswrapper[4755]: I0202 22:40:23.389929 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 22:40:25 crc kubenswrapper[4755]: I0202 22:40:25.090667 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9" path="/var/lib/kubelet/pods/2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9/volumes" Feb 02 22:40:53 crc kubenswrapper[4755]: I0202 22:40:53.390101 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 22:40:53 crc kubenswrapper[4755]: I0202 22:40:53.390977 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 22:40:53 crc kubenswrapper[4755]: I0202 22:40:53.391127 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" Feb 02 22:40:53 crc kubenswrapper[4755]: I0202 22:40:53.392375 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f71cdcc7a983e505ca4c9dbfa22fdcc5349a3f06e9d60bb048ae1ae60366e471"} pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 22:40:53 crc kubenswrapper[4755]: I0202 22:40:53.392477 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" containerID="cri-o://f71cdcc7a983e505ca4c9dbfa22fdcc5349a3f06e9d60bb048ae1ae60366e471" gracePeriod=600 Feb 02 22:40:54 crc kubenswrapper[4755]: I0202 22:40:54.270005 4755 generic.go:334] "Generic (PLEG): container finished" podID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerID="f71cdcc7a983e505ca4c9dbfa22fdcc5349a3f06e9d60bb048ae1ae60366e471" exitCode=0 Feb 02 22:40:54 crc kubenswrapper[4755]: I0202 22:40:54.270038 4755 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerDied","Data":"f71cdcc7a983e505ca4c9dbfa22fdcc5349a3f06e9d60bb048ae1ae60366e471"} Feb 02 22:40:54 crc kubenswrapper[4755]: I0202 22:40:54.270814 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerStarted","Data":"56c4714b35140ee6353c0afeb63cfebceaadf5fa9fb114929c1b4447c72f0448"} Feb 02 22:40:54 crc kubenswrapper[4755]: I0202 22:40:54.270913 4755 scope.go:117] "RemoveContainer" containerID="b65c15a6c515b50d6a76f33d222e225c35afe658ebde78c200d46a6e3dcd9487" Feb 02 22:42:53 crc kubenswrapper[4755]: I0202 22:42:53.389679 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 22:42:53 crc kubenswrapper[4755]: I0202 22:42:53.390523 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 22:43:05 crc kubenswrapper[4755]: I0202 22:43:05.395149 4755 scope.go:117] "RemoveContainer" containerID="a151fab0c988d424ed0d48f166e5abd543b8158db33024ca22de1a7eee5d49a7" Feb 02 22:43:23 crc kubenswrapper[4755]: I0202 22:43:23.389644 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 22:43:23 crc kubenswrapper[4755]: I0202 22:43:23.390180 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 22:43:53 crc kubenswrapper[4755]: I0202 22:43:53.389339 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 22:43:53 crc kubenswrapper[4755]: I0202 22:43:53.390104 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 22:43:53 crc kubenswrapper[4755]: I0202 22:43:53.390182 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" Feb 02 22:43:53 crc kubenswrapper[4755]: I0202 22:43:53.391023 4755 kuberuntime_manager.go:1027] "Message for Container of pod" 
containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"56c4714b35140ee6353c0afeb63cfebceaadf5fa9fb114929c1b4447c72f0448"} pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 22:43:53 crc kubenswrapper[4755]: I0202 22:43:53.391119 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" containerID="cri-o://56c4714b35140ee6353c0afeb63cfebceaadf5fa9fb114929c1b4447c72f0448" gracePeriod=600 Feb 02 22:43:54 crc kubenswrapper[4755]: I0202 22:43:54.527375 4755 generic.go:334] "Generic (PLEG): container finished" podID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerID="56c4714b35140ee6353c0afeb63cfebceaadf5fa9fb114929c1b4447c72f0448" exitCode=0 Feb 02 22:43:54 crc kubenswrapper[4755]: I0202 22:43:54.527478 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerDied","Data":"56c4714b35140ee6353c0afeb63cfebceaadf5fa9fb114929c1b4447c72f0448"} Feb 02 22:43:54 crc kubenswrapper[4755]: I0202 22:43:54.528274 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerStarted","Data":"bb11c6f11cba368ea19d2ffe2c3481fe5fb952a4be61f80011767257620e0091"} Feb 02 22:43:54 crc kubenswrapper[4755]: I0202 22:43:54.528326 4755 scope.go:117] "RemoveContainer" containerID="f71cdcc7a983e505ca4c9dbfa22fdcc5349a3f06e9d60bb048ae1ae60366e471" Feb 02 22:44:36 crc kubenswrapper[4755]: I0202 22:44:36.408536 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg"] Feb 02 22:44:36 crc kubenswrapper[4755]: E0202 22:44:36.409388 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9" containerName="registry" Feb 02 22:44:36 crc kubenswrapper[4755]: I0202 22:44:36.409410 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9" containerName="registry" Feb 02 22:44:36 crc kubenswrapper[4755]: I0202 22:44:36.409562 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fe8af2e-1abe-48dd-9afc-f0c4c524f8f9" containerName="registry" Feb 02 22:44:36 crc kubenswrapper[4755]: I0202 22:44:36.410958 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg" Feb 02 22:44:36 crc kubenswrapper[4755]: I0202 22:44:36.413927 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Feb 02 22:44:36 crc kubenswrapper[4755]: I0202 22:44:36.420750 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg"] Feb 02 22:44:36 crc kubenswrapper[4755]: I0202 22:44:36.502260 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b889cf85-3262-4e0f-834f-5598d21c019b-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg\" (UID: \"b889cf85-3262-4e0f-834f-5598d21c019b\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg" Feb 02 22:44:36 crc kubenswrapper[4755]: I0202 22:44:36.502355 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b889cf85-3262-4e0f-834f-5598d21c019b-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg\" (UID: \"b889cf85-3262-4e0f-834f-5598d21c019b\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg" Feb 02 22:44:36 crc kubenswrapper[4755]: I0202 22:44:36.502480 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8sls\" (UniqueName: \"kubernetes.io/projected/b889cf85-3262-4e0f-834f-5598d21c019b-kube-api-access-x8sls\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg\" (UID: \"b889cf85-3262-4e0f-834f-5598d21c019b\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg" Feb 02 22:44:36 crc kubenswrapper[4755]: I0202 22:44:36.603790 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b889cf85-3262-4e0f-834f-5598d21c019b-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg\" (UID: \"b889cf85-3262-4e0f-834f-5598d21c019b\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg" Feb 02 22:44:36 crc kubenswrapper[4755]: I0202 22:44:36.603903 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b889cf85-3262-4e0f-834f-5598d21c019b-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg\" (UID: \"b889cf85-3262-4e0f-834f-5598d21c019b\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg" Feb 02 22:44:36 crc kubenswrapper[4755]: I0202 22:44:36.603956 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8sls\" (UniqueName: \"kubernetes.io/projected/b889cf85-3262-4e0f-834f-5598d21c019b-kube-api-access-x8sls\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg\" (UID: \"b889cf85-3262-4e0f-834f-5598d21c019b\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg" Feb 02 22:44:36 crc kubenswrapper[4755]: I0202 22:44:36.604390 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/b889cf85-3262-4e0f-834f-5598d21c019b-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg\" (UID: \"b889cf85-3262-4e0f-834f-5598d21c019b\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg" Feb 02 22:44:36 crc kubenswrapper[4755]: I0202 22:44:36.604697 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b889cf85-3262-4e0f-834f-5598d21c019b-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg\" (UID: \"b889cf85-3262-4e0f-834f-5598d21c019b\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg" Feb 02 22:44:36 crc kubenswrapper[4755]: I0202 22:44:36.640855 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8sls\" (UniqueName: \"kubernetes.io/projected/b889cf85-3262-4e0f-834f-5598d21c019b-kube-api-access-x8sls\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg\" (UID: \"b889cf85-3262-4e0f-834f-5598d21c019b\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg" Feb 02 22:44:36 crc kubenswrapper[4755]: I0202 22:44:36.773714 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg" Feb 02 22:44:37 crc kubenswrapper[4755]: I0202 22:44:37.083768 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg"] Feb 02 22:44:37 crc kubenswrapper[4755]: I0202 22:44:37.862156 4755 generic.go:334] "Generic (PLEG): container finished" podID="b889cf85-3262-4e0f-834f-5598d21c019b" containerID="5d3029681efab661e5511c8baa322d152e2bbc4833a403c6b01bc8057d5596c4" exitCode=0 Feb 02 22:44:37 crc kubenswrapper[4755]: I0202 22:44:37.862213 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg" event={"ID":"b889cf85-3262-4e0f-834f-5598d21c019b","Type":"ContainerDied","Data":"5d3029681efab661e5511c8baa322d152e2bbc4833a403c6b01bc8057d5596c4"} Feb 02 22:44:37 crc kubenswrapper[4755]: I0202 22:44:37.862248 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg" event={"ID":"b889cf85-3262-4e0f-834f-5598d21c019b","Type":"ContainerStarted","Data":"21c59fc56b9c9d84b234df2693dd7281c321b457b6487ed612d2b21b7cdac9b8"} Feb 02 22:44:37 crc kubenswrapper[4755]: I0202 22:44:37.864850 4755 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 22:44:39 crc kubenswrapper[4755]: I0202 22:44:39.879235 4755 generic.go:334] "Generic (PLEG): container finished" podID="b889cf85-3262-4e0f-834f-5598d21c019b" containerID="78c3a45b1d1236915c25671630cb0e027f0ba2ddc4f4cc63e97ba305b4800b08" exitCode=0 Feb 02 22:44:39 crc kubenswrapper[4755]: I0202 22:44:39.879816 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg" event={"ID":"b889cf85-3262-4e0f-834f-5598d21c019b","Type":"ContainerDied","Data":"78c3a45b1d1236915c25671630cb0e027f0ba2ddc4f4cc63e97ba305b4800b08"} Feb 02 22:44:40 crc kubenswrapper[4755]: I0202 22:44:40.888649 4755 generic.go:334] "Generic (PLEG): container finished" 
podID="b889cf85-3262-4e0f-834f-5598d21c019b" containerID="798eb00b2aa51bc4124cf3090743871fdbb84b6fabfa276acfcaf7f0e8910cef" exitCode=0 Feb 02 22:44:40 crc kubenswrapper[4755]: I0202 22:44:40.888711 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg" event={"ID":"b889cf85-3262-4e0f-834f-5598d21c019b","Type":"ContainerDied","Data":"798eb00b2aa51bc4124cf3090743871fdbb84b6fabfa276acfcaf7f0e8910cef"} Feb 02 22:44:42 crc kubenswrapper[4755]: I0202 22:44:42.197652 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg" Feb 02 22:44:42 crc kubenswrapper[4755]: I0202 22:44:42.289436 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b889cf85-3262-4e0f-834f-5598d21c019b-util\") pod \"b889cf85-3262-4e0f-834f-5598d21c019b\" (UID: \"b889cf85-3262-4e0f-834f-5598d21c019b\") " Feb 02 22:44:42 crc kubenswrapper[4755]: I0202 22:44:42.289535 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b889cf85-3262-4e0f-834f-5598d21c019b-bundle\") pod \"b889cf85-3262-4e0f-834f-5598d21c019b\" (UID: \"b889cf85-3262-4e0f-834f-5598d21c019b\") " Feb 02 22:44:42 crc kubenswrapper[4755]: I0202 22:44:42.289556 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8sls\" (UniqueName: \"kubernetes.io/projected/b889cf85-3262-4e0f-834f-5598d21c019b-kube-api-access-x8sls\") pod \"b889cf85-3262-4e0f-834f-5598d21c019b\" (UID: \"b889cf85-3262-4e0f-834f-5598d21c019b\") " Feb 02 22:44:42 crc kubenswrapper[4755]: I0202 22:44:42.293992 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b889cf85-3262-4e0f-834f-5598d21c019b-bundle" (OuterVolumeSpecName: "bundle") pod "b889cf85-3262-4e0f-834f-5598d21c019b" (UID: "b889cf85-3262-4e0f-834f-5598d21c019b"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:44:42 crc kubenswrapper[4755]: I0202 22:44:42.294900 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b889cf85-3262-4e0f-834f-5598d21c019b-kube-api-access-x8sls" (OuterVolumeSpecName: "kube-api-access-x8sls") pod "b889cf85-3262-4e0f-834f-5598d21c019b" (UID: "b889cf85-3262-4e0f-834f-5598d21c019b"). InnerVolumeSpecName "kube-api-access-x8sls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:44:42 crc kubenswrapper[4755]: I0202 22:44:42.320336 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b889cf85-3262-4e0f-834f-5598d21c019b-util" (OuterVolumeSpecName: "util") pod "b889cf85-3262-4e0f-834f-5598d21c019b" (UID: "b889cf85-3262-4e0f-834f-5598d21c019b"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:44:42 crc kubenswrapper[4755]: I0202 22:44:42.391865 4755 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b889cf85-3262-4e0f-834f-5598d21c019b-util\") on node \"crc\" DevicePath \"\"" Feb 02 22:44:42 crc kubenswrapper[4755]: I0202 22:44:42.391914 4755 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b889cf85-3262-4e0f-834f-5598d21c019b-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:44:42 crc kubenswrapper[4755]: I0202 22:44:42.391934 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8sls\" (UniqueName: \"kubernetes.io/projected/b889cf85-3262-4e0f-834f-5598d21c019b-kube-api-access-x8sls\") on node \"crc\" DevicePath \"\"" Feb 02 22:44:42 crc kubenswrapper[4755]: I0202 22:44:42.906798 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg" event={"ID":"b889cf85-3262-4e0f-834f-5598d21c019b","Type":"ContainerDied","Data":"21c59fc56b9c9d84b234df2693dd7281c321b457b6487ed612d2b21b7cdac9b8"} Feb 02 22:44:42 crc kubenswrapper[4755]: I0202 22:44:42.906861 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21c59fc56b9c9d84b234df2693dd7281c321b457b6487ed612d2b21b7cdac9b8" Feb 02 22:44:42 crc kubenswrapper[4755]: I0202 22:44:42.907789 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg" Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.379039 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4mblb"] Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.379697 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovn-controller" containerID="cri-o://51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368" gracePeriod=30 Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.379867 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="sbdb" containerID="cri-o://13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d" gracePeriod=30 Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.379907 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="nbdb" containerID="cri-o://a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca" gracePeriod=30 Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.379949 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="northd" containerID="cri-o://f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984" gracePeriod=30 Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.379983 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="kube-rbac-proxy-ovn-metrics" 
containerID="cri-o://e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337" gracePeriod=30 Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.380014 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="kube-rbac-proxy-node" containerID="cri-o://11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c" gracePeriod=30 Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.380067 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovn-acl-logging" containerID="cri-o://5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc" gracePeriod=30 Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.413969 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovnkube-controller" containerID="cri-o://aaad222fc241dbf84cc91f20beb39ee65aafdf28e0738184def5d58b69e05917" gracePeriod=30 Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.933169 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4mblb_ae78d89e-7970-49df-8839-b1b6d7de4ec1/ovnkube-controller/3.log" Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.935137 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4mblb_ae78d89e-7970-49df-8839-b1b6d7de4ec1/ovn-acl-logging/0.log" Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.935516 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4mblb_ae78d89e-7970-49df-8839-b1b6d7de4ec1/ovn-controller/0.log" Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.935837 4755 generic.go:334] "Generic (PLEG): container finished" podID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerID="aaad222fc241dbf84cc91f20beb39ee65aafdf28e0738184def5d58b69e05917" exitCode=0 Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.935857 4755 generic.go:334] "Generic (PLEG): container finished" podID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerID="13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d" exitCode=0 Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.935864 4755 generic.go:334] "Generic (PLEG): container finished" podID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerID="a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca" exitCode=0 Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.935871 4755 generic.go:334] "Generic (PLEG): container finished" podID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerID="f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984" exitCode=0 Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.935890 4755 generic.go:334] "Generic (PLEG): container finished" podID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerID="e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337" exitCode=0 Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.935897 4755 generic.go:334] "Generic (PLEG): container finished" podID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerID="11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c" exitCode=0 Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.935902 4755 generic.go:334] "Generic (PLEG): 
container finished" podID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerID="5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc" exitCode=143 Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.935909 4755 generic.go:334] "Generic (PLEG): container finished" podID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerID="51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368" exitCode=143 Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.935954 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerDied","Data":"aaad222fc241dbf84cc91f20beb39ee65aafdf28e0738184def5d58b69e05917"} Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.935980 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerDied","Data":"13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d"} Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.935989 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerDied","Data":"a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca"} Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.935998 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerDied","Data":"f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984"} Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.936007 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerDied","Data":"e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337"} Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.936018 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerDied","Data":"11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c"} Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.936028 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerDied","Data":"5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc"} Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.936036 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerDied","Data":"51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368"} Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.936050 4755 scope.go:117] "RemoveContainer" containerID="23a5422c202fbce56b8c0b11d877cadff84502f56d66f5ce94b79164ff61417f" Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.938009 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5fdlw_c206b6fd-200d-47ea-88a5-453f3093c749/kube-multus/2.log" Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.938632 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-multus_multus-5fdlw_c206b6fd-200d-47ea-88a5-453f3093c749/kube-multus/1.log" Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.938670 4755 generic.go:334] "Generic (PLEG): container finished" podID="c206b6fd-200d-47ea-88a5-453f3093c749" containerID="0058d3561a900f271e03fea16adb2cfa9d0fe60aa9931b488e9d55c739895d14" exitCode=2 Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.938701 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5fdlw" event={"ID":"c206b6fd-200d-47ea-88a5-453f3093c749","Type":"ContainerDied","Data":"0058d3561a900f271e03fea16adb2cfa9d0fe60aa9931b488e9d55c739895d14"} Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.939115 4755 scope.go:117] "RemoveContainer" containerID="0058d3561a900f271e03fea16adb2cfa9d0fe60aa9931b488e9d55c739895d14" Feb 02 22:44:47 crc kubenswrapper[4755]: E0202 22:44:47.939327 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-5fdlw_openshift-multus(c206b6fd-200d-47ea-88a5-453f3093c749)\"" pod="openshift-multus/multus-5fdlw" podUID="c206b6fd-200d-47ea-88a5-453f3093c749" Feb 02 22:44:47 crc kubenswrapper[4755]: I0202 22:44:47.956177 4755 scope.go:117] "RemoveContainer" containerID="9261f8876de2e09d0918c8008e1c4bde12d29318d43a80e47aa7d6e6cfc35b2b" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.092105 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4mblb_ae78d89e-7970-49df-8839-b1b6d7de4ec1/ovn-acl-logging/0.log" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.092455 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4mblb_ae78d89e-7970-49df-8839-b1b6d7de4ec1/ovn-controller/0.log" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.092757 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.157861 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-mpvkm"] Feb 02 22:44:48 crc kubenswrapper[4755]: E0202 22:44:48.158059 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="sbdb" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158069 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="sbdb" Feb 02 22:44:48 crc kubenswrapper[4755]: E0202 22:44:48.158079 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b889cf85-3262-4e0f-834f-5598d21c019b" containerName="util" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158085 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b889cf85-3262-4e0f-834f-5598d21c019b" containerName="util" Feb 02 22:44:48 crc kubenswrapper[4755]: E0202 22:44:48.158093 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="nbdb" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158100 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="nbdb" Feb 02 22:44:48 crc kubenswrapper[4755]: E0202 22:44:48.158110 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="kube-rbac-proxy-node" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158116 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="kube-rbac-proxy-node" Feb 02 22:44:48 crc kubenswrapper[4755]: E0202 22:44:48.158124 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="kubecfg-setup" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158130 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="kubecfg-setup" Feb 02 22:44:48 crc kubenswrapper[4755]: E0202 22:44:48.158138 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovn-controller" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158143 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovn-controller" Feb 02 22:44:48 crc kubenswrapper[4755]: E0202 22:44:48.158152 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b889cf85-3262-4e0f-834f-5598d21c019b" containerName="extract" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158158 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b889cf85-3262-4e0f-834f-5598d21c019b" containerName="extract" Feb 02 22:44:48 crc kubenswrapper[4755]: E0202 22:44:48.158166 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovnkube-controller" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158171 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovnkube-controller" Feb 02 22:44:48 crc kubenswrapper[4755]: E0202 22:44:48.158178 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b889cf85-3262-4e0f-834f-5598d21c019b" containerName="pull" Feb 02 22:44:48 
crc kubenswrapper[4755]: I0202 22:44:48.158184 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b889cf85-3262-4e0f-834f-5598d21c019b" containerName="pull" Feb 02 22:44:48 crc kubenswrapper[4755]: E0202 22:44:48.158191 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="kube-rbac-proxy-ovn-metrics" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158198 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="kube-rbac-proxy-ovn-metrics" Feb 02 22:44:48 crc kubenswrapper[4755]: E0202 22:44:48.158206 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovn-acl-logging" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158212 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovn-acl-logging" Feb 02 22:44:48 crc kubenswrapper[4755]: E0202 22:44:48.158219 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovnkube-controller" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158225 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovnkube-controller" Feb 02 22:44:48 crc kubenswrapper[4755]: E0202 22:44:48.158232 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovnkube-controller" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158237 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovnkube-controller" Feb 02 22:44:48 crc kubenswrapper[4755]: E0202 22:44:48.158243 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovnkube-controller" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158248 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovnkube-controller" Feb 02 22:44:48 crc kubenswrapper[4755]: E0202 22:44:48.158255 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="northd" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158260 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="northd" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158345 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="b889cf85-3262-4e0f-834f-5598d21c019b" containerName="extract" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158353 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="kube-rbac-proxy-ovn-metrics" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158360 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovn-acl-logging" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158368 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovnkube-controller" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158377 4755 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovn-controller" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158384 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovnkube-controller" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158390 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovnkube-controller" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158396 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovnkube-controller" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158403 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="sbdb" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158411 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="kube-rbac-proxy-node" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158419 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="nbdb" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158426 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="northd" Feb 02 22:44:48 crc kubenswrapper[4755]: E0202 22:44:48.158507 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovnkube-controller" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158513 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovnkube-controller" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.158605 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" containerName="ovnkube-controller" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.159994 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.166750 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-log-socket\") pod \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.166795 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ae78d89e-7970-49df-8839-b1b6d7de4ec1-ovn-node-metrics-cert\") pod \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.166810 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-run-systemd\") pod \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.166842 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-run-netns\") pod \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.166871 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rw4t5\" (UniqueName: \"kubernetes.io/projected/ae78d89e-7970-49df-8839-b1b6d7de4ec1-kube-api-access-rw4t5\") pod \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.166876 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-log-socket" (OuterVolumeSpecName: "log-socket") pod "ae78d89e-7970-49df-8839-b1b6d7de4ec1" (UID: "ae78d89e-7970-49df-8839-b1b6d7de4ec1"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.166894 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-run-ovn-kubernetes\") pod \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.166927 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "ae78d89e-7970-49df-8839-b1b6d7de4ec1" (UID: "ae78d89e-7970-49df-8839-b1b6d7de4ec1"). InnerVolumeSpecName "host-run-ovn-kubernetes". 
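The RemoveStaleState burst above is the kubelet's CPU and memory managers discarding per-container accounting left behind by the deleted ovnkube-node-4mblb pod (UID ae78d89e-…) before its replacement ovnkube-node-mpvkm is admitted; the same containerName (ovnkube-controller) repeats because several recorded instances of that container each left their own state entry. A minimal sketch for tallying these cleanups from a capture like this one follows; the kubelet.log path is a hypothetical placeholder, and the whole-file read is deliberate because entries here share physical lines.

import re
from collections import Counter

LOG = "kubelet.log"  # hypothetical path to this capture

text = open(LOG).read()

# cpu_manager.go, state_mem.go and memory_manager.go all tag their cleanup
# entries the same way; capture (message, podUID, containerName) per match.
pat = re.compile(
    r'"(RemoveStaleState: removing container'
    r'|Deleted CPUSet assignment'
    r'|RemoveStaleState removing state)"'
    r' podUID="([0-9a-f-]+)" containerName="([^"]+)"'
)

counts = Counter((uid[:8], name, msg) for msg, uid, name in pat.findall(text))
for (uid, name, msg), n in sorted(counts.items()):
    print(f"{uid} {name:30} {msg:40} x{n}")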
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.166946 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-node-log\") pod \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.166976 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-systemd-units\") pod \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.166996 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-cni-netd\") pod \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167014 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-var-lib-openvswitch\") pod \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167036 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-cni-bin\") pod \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167049 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167071 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ae78d89e-7970-49df-8839-b1b6d7de4ec1-ovnkube-script-lib\") pod \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167100 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ae78d89e-7970-49df-8839-b1b6d7de4ec1-env-overrides\") pod \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167129 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-kubelet\") pod \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167145 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-etc-openvswitch\") pod 
\"ae78d89e-7970-49df-8839-b1b6d7de4ec1\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167172 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-run-openvswitch\") pod \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167199 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ae78d89e-7970-49df-8839-b1b6d7de4ec1-ovnkube-config\") pod \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167216 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-slash\") pod \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167230 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-run-ovn\") pod \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\" (UID: \"ae78d89e-7970-49df-8839-b1b6d7de4ec1\") " Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167530 4755 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167550 4755 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-log-socket\") on node \"crc\" DevicePath \"\"" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167574 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "ae78d89e-7970-49df-8839-b1b6d7de4ec1" (UID: "ae78d89e-7970-49df-8839-b1b6d7de4ec1"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167595 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-node-log" (OuterVolumeSpecName: "node-log") pod "ae78d89e-7970-49df-8839-b1b6d7de4ec1" (UID: "ae78d89e-7970-49df-8839-b1b6d7de4ec1"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167611 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "ae78d89e-7970-49df-8839-b1b6d7de4ec1" (UID: "ae78d89e-7970-49df-8839-b1b6d7de4ec1"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167613 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "ae78d89e-7970-49df-8839-b1b6d7de4ec1" (UID: "ae78d89e-7970-49df-8839-b1b6d7de4ec1"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167631 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "ae78d89e-7970-49df-8839-b1b6d7de4ec1" (UID: "ae78d89e-7970-49df-8839-b1b6d7de4ec1"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167646 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "ae78d89e-7970-49df-8839-b1b6d7de4ec1" (UID: "ae78d89e-7970-49df-8839-b1b6d7de4ec1"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167673 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "ae78d89e-7970-49df-8839-b1b6d7de4ec1" (UID: "ae78d89e-7970-49df-8839-b1b6d7de4ec1"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167703 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "ae78d89e-7970-49df-8839-b1b6d7de4ec1" (UID: "ae78d89e-7970-49df-8839-b1b6d7de4ec1"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167708 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "ae78d89e-7970-49df-8839-b1b6d7de4ec1" (UID: "ae78d89e-7970-49df-8839-b1b6d7de4ec1"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167738 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-slash" (OuterVolumeSpecName: "host-slash") pod "ae78d89e-7970-49df-8839-b1b6d7de4ec1" (UID: "ae78d89e-7970-49df-8839-b1b6d7de4ec1"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167773 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "ae78d89e-7970-49df-8839-b1b6d7de4ec1" (UID: "ae78d89e-7970-49df-8839-b1b6d7de4ec1"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167815 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "ae78d89e-7970-49df-8839-b1b6d7de4ec1" (UID: "ae78d89e-7970-49df-8839-b1b6d7de4ec1"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.167988 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae78d89e-7970-49df-8839-b1b6d7de4ec1-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "ae78d89e-7970-49df-8839-b1b6d7de4ec1" (UID: "ae78d89e-7970-49df-8839-b1b6d7de4ec1"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.168006 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae78d89e-7970-49df-8839-b1b6d7de4ec1-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "ae78d89e-7970-49df-8839-b1b6d7de4ec1" (UID: "ae78d89e-7970-49df-8839-b1b6d7de4ec1"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.168206 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae78d89e-7970-49df-8839-b1b6d7de4ec1-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "ae78d89e-7970-49df-8839-b1b6d7de4ec1" (UID: "ae78d89e-7970-49df-8839-b1b6d7de4ec1"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.176910 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae78d89e-7970-49df-8839-b1b6d7de4ec1-kube-api-access-rw4t5" (OuterVolumeSpecName: "kube-api-access-rw4t5") pod "ae78d89e-7970-49df-8839-b1b6d7de4ec1" (UID: "ae78d89e-7970-49df-8839-b1b6d7de4ec1"). InnerVolumeSpecName "kube-api-access-rw4t5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.177264 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae78d89e-7970-49df-8839-b1b6d7de4ec1-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "ae78d89e-7970-49df-8839-b1b6d7de4ec1" (UID: "ae78d89e-7970-49df-8839-b1b6d7de4ec1"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.180037 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "ae78d89e-7970-49df-8839-b1b6d7de4ec1" (UID: "ae78d89e-7970-49df-8839-b1b6d7de4ec1"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.268974 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-var-lib-openvswitch\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269017 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-systemd-units\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269049 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-host-cni-netd\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269127 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6n8n\" (UniqueName: \"kubernetes.io/projected/9f7076a8-349f-4ff2-9489-f4c034e17c17-kube-api-access-d6n8n\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269157 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-run-systemd\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269174 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9f7076a8-349f-4ff2-9489-f4c034e17c17-env-overrides\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269188 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-host-run-ovn-kubernetes\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269214 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-etc-openvswitch\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269231 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-host-run-netns\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269245 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9f7076a8-349f-4ff2-9489-f4c034e17c17-ovnkube-config\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269261 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269277 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-node-log\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269293 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9f7076a8-349f-4ff2-9489-f4c034e17c17-ovnkube-script-lib\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269308 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-run-openvswitch\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269331 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-log-socket\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269346 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-host-kubelet\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269365 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-host-slash\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269381 4755 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-host-cni-bin\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269396 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9f7076a8-349f-4ff2-9489-f4c034e17c17-ovn-node-metrics-cert\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269411 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-run-ovn\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269442 4755 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-run-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269453 4755 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ae78d89e-7970-49df-8839-b1b6d7de4ec1-ovnkube-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269461 4755 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-slash\") on node \"crc\" DevicePath \"\"" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269470 4755 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269478 4755 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ae78d89e-7970-49df-8839-b1b6d7de4ec1-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269487 4755 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-run-systemd\") on node \"crc\" DevicePath \"\"" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269495 4755 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-run-netns\") on node \"crc\" DevicePath \"\"" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269503 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rw4t5\" (UniqueName: \"kubernetes.io/projected/ae78d89e-7970-49df-8839-b1b6d7de4ec1-kube-api-access-rw4t5\") on node \"crc\" DevicePath \"\"" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269510 4755 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-node-log\") on node \"crc\" DevicePath \"\"" Feb 02 22:44:48 crc 
kubenswrapper[4755]: I0202 22:44:48.269518 4755 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-systemd-units\") on node \"crc\" DevicePath \"\"" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269525 4755 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-cni-netd\") on node \"crc\" DevicePath \"\"" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269533 4755 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269541 4755 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-cni-bin\") on node \"crc\" DevicePath \"\"" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269549 4755 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269557 4755 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ae78d89e-7970-49df-8839-b1b6d7de4ec1-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269565 4755 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ae78d89e-7970-49df-8839-b1b6d7de4ec1-env-overrides\") on node \"crc\" DevicePath \"\"" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269573 4755 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.269580 4755 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ae78d89e-7970-49df-8839-b1b6d7de4ec1-host-kubelet\") on node \"crc\" DevicePath \"\"" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.370194 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-host-run-netns\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.370235 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9f7076a8-349f-4ff2-9489-f4c034e17c17-ovnkube-config\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.370252 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mpvkm\" 
(UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.370270 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-node-log\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.370285 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9f7076a8-349f-4ff2-9489-f4c034e17c17-ovnkube-script-lib\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.370302 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-run-openvswitch\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.370325 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-host-kubelet\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.370338 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-log-socket\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.370356 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-host-slash\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.370420 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-host-cni-bin\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.370453 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-host-run-netns\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.371012 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9f7076a8-349f-4ff2-9489-f4c034e17c17-ovnkube-config\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 
22:44:48.371048 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.371069 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-node-log\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.371460 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9f7076a8-349f-4ff2-9489-f4c034e17c17-ovnkube-script-lib\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.371495 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-run-openvswitch\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.371514 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-host-kubelet\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.371532 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-log-socket\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.371552 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-host-slash\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.370376 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-host-cni-bin\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.371576 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-run-ovn\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.371590 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/9f7076a8-349f-4ff2-9489-f4c034e17c17-ovn-node-metrics-cert\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.371608 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-var-lib-openvswitch\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.371622 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-systemd-units\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.371641 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-host-cni-netd\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.371657 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6n8n\" (UniqueName: \"kubernetes.io/projected/9f7076a8-349f-4ff2-9489-f4c034e17c17-kube-api-access-d6n8n\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.371676 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-run-systemd\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.371693 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9f7076a8-349f-4ff2-9489-f4c034e17c17-env-overrides\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.371707 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-host-run-ovn-kubernetes\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.371743 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-etc-openvswitch\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.371786 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-etc-openvswitch\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.371806 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-run-ovn\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.372223 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-systemd-units\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.372283 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-var-lib-openvswitch\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.372248 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-host-cni-netd\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.372344 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-host-run-ovn-kubernetes\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.372359 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9f7076a8-349f-4ff2-9489-f4c034e17c17-run-systemd\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.372846 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9f7076a8-349f-4ff2-9489-f4c034e17c17-env-overrides\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.374305 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9f7076a8-349f-4ff2-9489-f4c034e17c17-ovn-node-metrics-cert\") pod \"ovnkube-node-mpvkm\" (UID: \"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.390963 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6n8n\" (UniqueName: \"kubernetes.io/projected/9f7076a8-349f-4ff2-9489-f4c034e17c17-kube-api-access-d6n8n\") pod \"ovnkube-node-mpvkm\" (UID: 
\"9f7076a8-349f-4ff2-9489-f4c034e17c17\") " pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.474858 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.945207 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4mblb_ae78d89e-7970-49df-8839-b1b6d7de4ec1/ovn-acl-logging/0.log" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.945819 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4mblb_ae78d89e-7970-49df-8839-b1b6d7de4ec1/ovn-controller/0.log" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.946126 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" event={"ID":"ae78d89e-7970-49df-8839-b1b6d7de4ec1","Type":"ContainerDied","Data":"59354c8d6573fdee55706a130865ae6220464a942d3779cd9dea9509181468e9"} Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.946162 4755 scope.go:117] "RemoveContainer" containerID="aaad222fc241dbf84cc91f20beb39ee65aafdf28e0738184def5d58b69e05917" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.946206 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4mblb" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.948438 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5fdlw_c206b6fd-200d-47ea-88a5-453f3093c749/kube-multus/2.log" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.950008 4755 generic.go:334] "Generic (PLEG): container finished" podID="9f7076a8-349f-4ff2-9489-f4c034e17c17" containerID="f9b23a55d0e047ecf7620f913cd05fbf2ffb0bfaa3dc701c13e34924a65bb553" exitCode=0 Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.950049 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" event={"ID":"9f7076a8-349f-4ff2-9489-f4c034e17c17","Type":"ContainerDied","Data":"f9b23a55d0e047ecf7620f913cd05fbf2ffb0bfaa3dc701c13e34924a65bb553"} Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.950082 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" event={"ID":"9f7076a8-349f-4ff2-9489-f4c034e17c17","Type":"ContainerStarted","Data":"f1618139168a957c1ba0a42719ebb512a833939574adb3c6bec0b826ce38c0a1"} Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.966544 4755 scope.go:117] "RemoveContainer" containerID="13a43ed30c54a778a8d15a3dae3450b5b42aebcb99b4cfdb1c38c8350770369d" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.980131 4755 scope.go:117] "RemoveContainer" containerID="a3e9a87a55bcf144fe16b9a6b18235e10b7dcc9ebbaa8fa524f3aa80ab1d98ca" Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.992003 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4mblb"] Feb 02 22:44:48 crc kubenswrapper[4755]: I0202 22:44:48.996388 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4mblb"] Feb 02 22:44:49 crc kubenswrapper[4755]: I0202 22:44:49.004582 4755 scope.go:117] "RemoveContainer" containerID="f5f990877b70d818d7bb4f92c4d5b3587cec92ebbe30a92b9c25f81a916da984" Feb 02 22:44:49 crc kubenswrapper[4755]: I0202 22:44:49.018849 4755 scope.go:117] "RemoveContainer" 
containerID="e7b19c6b1147684283a6fb3fc2dcfc6e7ea49ea5837bd668caa82a98c3421337" Feb 02 22:44:49 crc kubenswrapper[4755]: I0202 22:44:49.037986 4755 scope.go:117] "RemoveContainer" containerID="11d01eeefac978d8da553dcf5b13aa0f56e5b5a3da4a630397f39d402826208c" Feb 02 22:44:49 crc kubenswrapper[4755]: I0202 22:44:49.063642 4755 scope.go:117] "RemoveContainer" containerID="5dd743feb6696ceb53f73ff3cc592e1801d637032cc2677e93e31d67ce6578cc" Feb 02 22:44:49 crc kubenswrapper[4755]: I0202 22:44:49.082610 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae78d89e-7970-49df-8839-b1b6d7de4ec1" path="/var/lib/kubelet/pods/ae78d89e-7970-49df-8839-b1b6d7de4ec1/volumes" Feb 02 22:44:49 crc kubenswrapper[4755]: I0202 22:44:49.098173 4755 scope.go:117] "RemoveContainer" containerID="51d5a170374998acc548ac8521279efee3560ba6c24f2873dfbfbab8cd66f368" Feb 02 22:44:49 crc kubenswrapper[4755]: I0202 22:44:49.121079 4755 scope.go:117] "RemoveContainer" containerID="ad235df283265a17bc61bf4b419d947f8146d8ac46fe1ac02b5b2fe694df0049" Feb 02 22:44:49 crc kubenswrapper[4755]: I0202 22:44:49.958858 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" event={"ID":"9f7076a8-349f-4ff2-9489-f4c034e17c17","Type":"ContainerStarted","Data":"ac8ae35ea9b25e32bcd923f72afa89e588ad11fe881078c463a2d5f004b2991d"} Feb 02 22:44:49 crc kubenswrapper[4755]: I0202 22:44:49.959140 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" event={"ID":"9f7076a8-349f-4ff2-9489-f4c034e17c17","Type":"ContainerStarted","Data":"e465f732e8b59a35cb712c2b15cd17922a0c985f3f1dfc0eaeac1e89fae37be7"} Feb 02 22:44:49 crc kubenswrapper[4755]: I0202 22:44:49.959154 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" event={"ID":"9f7076a8-349f-4ff2-9489-f4c034e17c17","Type":"ContainerStarted","Data":"603c520dfd1cc9e1cd72e7360fa74b83a5b605a846655a99d5adc7cc9ad9bad8"} Feb 02 22:44:49 crc kubenswrapper[4755]: I0202 22:44:49.959167 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" event={"ID":"9f7076a8-349f-4ff2-9489-f4c034e17c17","Type":"ContainerStarted","Data":"57a09441e891d2a3cf9b9f6ee131ff9a62c77da662fda91dab581c3bcec24af1"} Feb 02 22:44:49 crc kubenswrapper[4755]: I0202 22:44:49.959180 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" event={"ID":"9f7076a8-349f-4ff2-9489-f4c034e17c17","Type":"ContainerStarted","Data":"b1a50d9db6de5422413b056004d5c4d9d57e7d04b76663cc0554f99e3f1a48bd"} Feb 02 22:44:49 crc kubenswrapper[4755]: I0202 22:44:49.959193 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" event={"ID":"9f7076a8-349f-4ff2-9489-f4c034e17c17","Type":"ContainerStarted","Data":"d8a10375456a9b551f0e5e582ea1cc581fe1bdcad330f1a09513546a250b14f2"} Feb 02 22:44:50 crc kubenswrapper[4755]: I0202 22:44:50.794571 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx"] Feb 02 22:44:50 crc kubenswrapper[4755]: I0202 22:44:50.795203 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx"
Feb 02 22:44:50 crc kubenswrapper[4755]: I0202 22:44:50.803105 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt"
Feb 02 22:44:50 crc kubenswrapper[4755]: I0202 22:44:50.804091 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-xp6gf"
Feb 02 22:44:50 crc kubenswrapper[4755]: I0202 22:44:50.804500 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt"
Feb 02 22:44:50 crc kubenswrapper[4755]: I0202 22:44:50.900525 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbrf5\" (UniqueName: \"kubernetes.io/projected/2c08ecd8-3885-49b8-af63-b0ffee6b10ef-kube-api-access-cbrf5\") pod \"obo-prometheus-operator-68bc856cb9-tbnvx\" (UID: \"2c08ecd8-3885-49b8-af63-b0ffee6b10ef\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx"
Feb 02 22:44:50 crc kubenswrapper[4755]: I0202 22:44:50.904245 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff"]
Feb 02 22:44:50 crc kubenswrapper[4755]: I0202 22:44:50.904912 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff"
Feb 02 22:44:50 crc kubenswrapper[4755]: I0202 22:44:50.907517 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert"
Feb 02 22:44:50 crc kubenswrapper[4755]: I0202 22:44:50.907649 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-ft6lc"
Feb 02 22:44:50 crc kubenswrapper[4755]: I0202 22:44:50.928216 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh"]
Feb 02 22:44:50 crc kubenswrapper[4755]: I0202 22:44:50.928942 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.002118 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbrf5\" (UniqueName: \"kubernetes.io/projected/2c08ecd8-3885-49b8-af63-b0ffee6b10ef-kube-api-access-cbrf5\") pod \"obo-prometheus-operator-68bc856cb9-tbnvx\" (UID: \"2c08ecd8-3885-49b8-af63-b0ffee6b10ef\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.023145 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-gs2gv"]
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.023966 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-gs2gv"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.028149 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.030338 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-sbfgn"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.035619 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbrf5\" (UniqueName: \"kubernetes.io/projected/2c08ecd8-3885-49b8-af63-b0ffee6b10ef-kube-api-access-cbrf5\") pod \"obo-prometheus-operator-68bc856cb9-tbnvx\" (UID: \"2c08ecd8-3885-49b8-af63-b0ffee6b10ef\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.104209 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/425aaf78-7478-49df-822c-f7108a7765bb-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff\" (UID: \"425aaf78-7478-49df-822c-f7108a7765bb\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.104291 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/71505c0d-899c-4c32-8563-8185908f8f5a-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh\" (UID: \"71505c0d-899c-4c32-8563-8185908f8f5a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.104529 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/71505c0d-899c-4c32-8563-8185908f8f5a-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh\" (UID: \"71505c0d-899c-4c32-8563-8185908f8f5a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.104763 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/425aaf78-7478-49df-822c-f7108a7765bb-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff\" (UID: \"425aaf78-7478-49df-822c-f7108a7765bb\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.109168 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.114515 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-ppqsb"]
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.115294 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-ppqsb"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.116993 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-csrqn"
Feb 02 22:44:51 crc kubenswrapper[4755]: E0202 22:44:51.133057 4755 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-68bc856cb9-tbnvx_openshift-operators_2c08ecd8-3885-49b8-af63-b0ffee6b10ef_0(0f64adc902b7b2819f827c683c06f8080e310e9851d283905f3ec7f9b00f2155): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Feb 02 22:44:51 crc kubenswrapper[4755]: E0202 22:44:51.133393 4755 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-68bc856cb9-tbnvx_openshift-operators_2c08ecd8-3885-49b8-af63-b0ffee6b10ef_0(0f64adc902b7b2819f827c683c06f8080e310e9851d283905f3ec7f9b00f2155): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx"
Feb 02 22:44:51 crc kubenswrapper[4755]: E0202 22:44:51.133414 4755 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-68bc856cb9-tbnvx_openshift-operators_2c08ecd8-3885-49b8-af63-b0ffee6b10ef_0(0f64adc902b7b2819f827c683c06f8080e310e9851d283905f3ec7f9b00f2155): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx"
Feb 02 22:44:51 crc kubenswrapper[4755]: E0202 22:44:51.133458 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-68bc856cb9-tbnvx_openshift-operators(2c08ecd8-3885-49b8-af63-b0ffee6b10ef)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-68bc856cb9-tbnvx_openshift-operators(2c08ecd8-3885-49b8-af63-b0ffee6b10ef)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-68bc856cb9-tbnvx_openshift-operators_2c08ecd8-3885-49b8-af63-b0ffee6b10ef_0(0f64adc902b7b2819f827c683c06f8080e310e9851d283905f3ec7f9b00f2155): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx" podUID="2c08ecd8-3885-49b8-af63-b0ffee6b10ef"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.206103 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/425aaf78-7478-49df-822c-f7108a7765bb-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff\" (UID: \"425aaf78-7478-49df-822c-f7108a7765bb\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.206188 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74mmf\" (UniqueName: \"kubernetes.io/projected/c62fe136-2618-4f3a-b6d0-b6d35df54f4c-kube-api-access-74mmf\") pod \"observability-operator-59bdc8b94-gs2gv\" (UID: \"c62fe136-2618-4f3a-b6d0-b6d35df54f4c\") " pod="openshift-operators/observability-operator-59bdc8b94-gs2gv"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.206220 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/71505c0d-899c-4c32-8563-8185908f8f5a-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh\" (UID: \"71505c0d-899c-4c32-8563-8185908f8f5a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.206254 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/71505c0d-899c-4c32-8563-8185908f8f5a-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh\" (UID: \"71505c0d-899c-4c32-8563-8185908f8f5a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.206402 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/c62fe136-2618-4f3a-b6d0-b6d35df54f4c-observability-operator-tls\") pod \"observability-operator-59bdc8b94-gs2gv\" (UID: \"c62fe136-2618-4f3a-b6d0-b6d35df54f4c\") " pod="openshift-operators/observability-operator-59bdc8b94-gs2gv"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.206513 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/425aaf78-7478-49df-822c-f7108a7765bb-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff\" (UID: \"425aaf78-7478-49df-822c-f7108a7765bb\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.209578 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/425aaf78-7478-49df-822c-f7108a7765bb-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff\" (UID: \"425aaf78-7478-49df-822c-f7108a7765bb\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.211109 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/71505c0d-899c-4c32-8563-8185908f8f5a-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh\" (UID: \"71505c0d-899c-4c32-8563-8185908f8f5a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.214206 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/425aaf78-7478-49df-822c-f7108a7765bb-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff\" (UID: \"425aaf78-7478-49df-822c-f7108a7765bb\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.215164 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/71505c0d-899c-4c32-8563-8185908f8f5a-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh\" (UID: \"71505c0d-899c-4c32-8563-8185908f8f5a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.218093 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff"
Feb 02 22:44:51 crc kubenswrapper[4755]: E0202 22:44:51.242552 4755 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff_openshift-operators_425aaf78-7478-49df-822c-f7108a7765bb_0(9f4451c62314b314562f04ca7cc5d06516aa750c88082d817feb85c4e2cbf13a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Feb 02 22:44:51 crc kubenswrapper[4755]: E0202 22:44:51.242621 4755 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff_openshift-operators_425aaf78-7478-49df-822c-f7108a7765bb_0(9f4451c62314b314562f04ca7cc5d06516aa750c88082d817feb85c4e2cbf13a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff"
Feb 02 22:44:51 crc kubenswrapper[4755]: E0202 22:44:51.242645 4755 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff_openshift-operators_425aaf78-7478-49df-822c-f7108a7765bb_0(9f4451c62314b314562f04ca7cc5d06516aa750c88082d817feb85c4e2cbf13a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff"
Feb 02 22:44:51 crc kubenswrapper[4755]: E0202 22:44:51.242690 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff_openshift-operators(425aaf78-7478-49df-822c-f7108a7765bb)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff_openshift-operators(425aaf78-7478-49df-822c-f7108a7765bb)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff_openshift-operators_425aaf78-7478-49df-822c-f7108a7765bb_0(9f4451c62314b314562f04ca7cc5d06516aa750c88082d817feb85c4e2cbf13a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff" podUID="425aaf78-7478-49df-822c-f7108a7765bb"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.242748 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh"
Feb 02 22:44:51 crc kubenswrapper[4755]: E0202 22:44:51.274910 4755 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh_openshift-operators_71505c0d-899c-4c32-8563-8185908f8f5a_0(9b504c06070fc4bf1f5a539d59198de9df745996a36bf62015612cfd0115ee58): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Feb 02 22:44:51 crc kubenswrapper[4755]: E0202 22:44:51.274974 4755 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh_openshift-operators_71505c0d-899c-4c32-8563-8185908f8f5a_0(9b504c06070fc4bf1f5a539d59198de9df745996a36bf62015612cfd0115ee58): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh"
Feb 02 22:44:51 crc kubenswrapper[4755]: E0202 22:44:51.274994 4755 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh_openshift-operators_71505c0d-899c-4c32-8563-8185908f8f5a_0(9b504c06070fc4bf1f5a539d59198de9df745996a36bf62015612cfd0115ee58): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh"
Feb 02 22:44:51 crc kubenswrapper[4755]: E0202 22:44:51.275039 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh_openshift-operators(71505c0d-899c-4c32-8563-8185908f8f5a)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh_openshift-operators(71505c0d-899c-4c32-8563-8185908f8f5a)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh_openshift-operators_71505c0d-899c-4c32-8563-8185908f8f5a_0(9b504c06070fc4bf1f5a539d59198de9df745996a36bf62015612cfd0115ee58): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh" podUID="71505c0d-899c-4c32-8563-8185908f8f5a"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.307358 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74mmf\" (UniqueName: \"kubernetes.io/projected/c62fe136-2618-4f3a-b6d0-b6d35df54f4c-kube-api-access-74mmf\") pod \"observability-operator-59bdc8b94-gs2gv\" (UID: \"c62fe136-2618-4f3a-b6d0-b6d35df54f4c\") " pod="openshift-operators/observability-operator-59bdc8b94-gs2gv"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.307440 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/c62fe136-2618-4f3a-b6d0-b6d35df54f4c-observability-operator-tls\") pod \"observability-operator-59bdc8b94-gs2gv\" (UID: \"c62fe136-2618-4f3a-b6d0-b6d35df54f4c\") " pod="openshift-operators/observability-operator-59bdc8b94-gs2gv"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.307472 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5h44j\" (UniqueName: \"kubernetes.io/projected/5a9aeb48-ea8d-4648-a311-263475a2738c-kube-api-access-5h44j\") pod \"perses-operator-5bf474d74f-ppqsb\" (UID: \"5a9aeb48-ea8d-4648-a311-263475a2738c\") " pod="openshift-operators/perses-operator-5bf474d74f-ppqsb"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.307500 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/5a9aeb48-ea8d-4648-a311-263475a2738c-openshift-service-ca\") pod \"perses-operator-5bf474d74f-ppqsb\" (UID: \"5a9aeb48-ea8d-4648-a311-263475a2738c\") " pod="openshift-operators/perses-operator-5bf474d74f-ppqsb"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.312171 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/c62fe136-2618-4f3a-b6d0-b6d35df54f4c-observability-operator-tls\") pod \"observability-operator-59bdc8b94-gs2gv\" (UID: \"c62fe136-2618-4f3a-b6d0-b6d35df54f4c\") " pod="openshift-operators/observability-operator-59bdc8b94-gs2gv"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.324976 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74mmf\" (UniqueName: \"kubernetes.io/projected/c62fe136-2618-4f3a-b6d0-b6d35df54f4c-kube-api-access-74mmf\") pod \"observability-operator-59bdc8b94-gs2gv\" (UID: \"c62fe136-2618-4f3a-b6d0-b6d35df54f4c\") " pod="openshift-operators/observability-operator-59bdc8b94-gs2gv"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.353502 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-gs2gv"
Feb 02 22:44:51 crc kubenswrapper[4755]: E0202 22:44:51.374416 4755 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-59bdc8b94-gs2gv_openshift-operators_c62fe136-2618-4f3a-b6d0-b6d35df54f4c_0(8cdb53e071d80fd74bf81a5ab6d35f7dba067816830dbc1b3af3f4fc2d54936a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Feb 02 22:44:51 crc kubenswrapper[4755]: E0202 22:44:51.374484 4755 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-59bdc8b94-gs2gv_openshift-operators_c62fe136-2618-4f3a-b6d0-b6d35df54f4c_0(8cdb53e071d80fd74bf81a5ab6d35f7dba067816830dbc1b3af3f4fc2d54936a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-59bdc8b94-gs2gv"
Feb 02 22:44:51 crc kubenswrapper[4755]: E0202 22:44:51.374503 4755 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-59bdc8b94-gs2gv_openshift-operators_c62fe136-2618-4f3a-b6d0-b6d35df54f4c_0(8cdb53e071d80fd74bf81a5ab6d35f7dba067816830dbc1b3af3f4fc2d54936a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-59bdc8b94-gs2gv"
Feb 02 22:44:51 crc kubenswrapper[4755]: E0202 22:44:51.374551 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-59bdc8b94-gs2gv_openshift-operators(c62fe136-2618-4f3a-b6d0-b6d35df54f4c)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-59bdc8b94-gs2gv_openshift-operators(c62fe136-2618-4f3a-b6d0-b6d35df54f4c)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-59bdc8b94-gs2gv_openshift-operators_c62fe136-2618-4f3a-b6d0-b6d35df54f4c_0(8cdb53e071d80fd74bf81a5ab6d35f7dba067816830dbc1b3af3f4fc2d54936a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-59bdc8b94-gs2gv" podUID="c62fe136-2618-4f3a-b6d0-b6d35df54f4c"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.408766 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5h44j\" (UniqueName: \"kubernetes.io/projected/5a9aeb48-ea8d-4648-a311-263475a2738c-kube-api-access-5h44j\") pod \"perses-operator-5bf474d74f-ppqsb\" (UID: \"5a9aeb48-ea8d-4648-a311-263475a2738c\") " pod="openshift-operators/perses-operator-5bf474d74f-ppqsb"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.408851 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/5a9aeb48-ea8d-4648-a311-263475a2738c-openshift-service-ca\") pod \"perses-operator-5bf474d74f-ppqsb\" (UID: \"5a9aeb48-ea8d-4648-a311-263475a2738c\") " pod="openshift-operators/perses-operator-5bf474d74f-ppqsb"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.410131 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/5a9aeb48-ea8d-4648-a311-263475a2738c-openshift-service-ca\") pod \"perses-operator-5bf474d74f-ppqsb\" (UID: \"5a9aeb48-ea8d-4648-a311-263475a2738c\") " pod="openshift-operators/perses-operator-5bf474d74f-ppqsb"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.428416 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5h44j\" (UniqueName: \"kubernetes.io/projected/5a9aeb48-ea8d-4648-a311-263475a2738c-kube-api-access-5h44j\") pod \"perses-operator-5bf474d74f-ppqsb\" (UID: \"5a9aeb48-ea8d-4648-a311-263475a2738c\") " pod="openshift-operators/perses-operator-5bf474d74f-ppqsb"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.464693 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-ppqsb"
Feb 02 22:44:51 crc kubenswrapper[4755]: E0202 22:44:51.492710 4755 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5bf474d74f-ppqsb_openshift-operators_5a9aeb48-ea8d-4648-a311-263475a2738c_0(519aa759d5f914efb2f66edf8e0970118de4bf216b0d42349d5dd226e51098e8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Feb 02 22:44:51 crc kubenswrapper[4755]: E0202 22:44:51.492795 4755 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5bf474d74f-ppqsb_openshift-operators_5a9aeb48-ea8d-4648-a311-263475a2738c_0(519aa759d5f914efb2f66edf8e0970118de4bf216b0d42349d5dd226e51098e8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5bf474d74f-ppqsb"
Feb 02 22:44:51 crc kubenswrapper[4755]: E0202 22:44:51.492835 4755 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5bf474d74f-ppqsb_openshift-operators_5a9aeb48-ea8d-4648-a311-263475a2738c_0(519aa759d5f914efb2f66edf8e0970118de4bf216b0d42349d5dd226e51098e8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5bf474d74f-ppqsb"
Feb 02 22:44:51 crc kubenswrapper[4755]: E0202 22:44:51.492874 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5bf474d74f-ppqsb_openshift-operators(5a9aeb48-ea8d-4648-a311-263475a2738c)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5bf474d74f-ppqsb_openshift-operators(5a9aeb48-ea8d-4648-a311-263475a2738c)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5bf474d74f-ppqsb_openshift-operators_5a9aeb48-ea8d-4648-a311-263475a2738c_0(519aa759d5f914efb2f66edf8e0970118de4bf216b0d42349d5dd226e51098e8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-5bf474d74f-ppqsb" podUID="5a9aeb48-ea8d-4648-a311-263475a2738c"
Feb 02 22:44:51 crc kubenswrapper[4755]: I0202 22:44:51.974372 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" event={"ID":"9f7076a8-349f-4ff2-9489-f4c034e17c17","Type":"ContainerStarted","Data":"8d6b165797663ac7bcaaa5f58fa083df44976d0a78a80963f52f93a7cc748cb5"}
Feb 02 22:44:54 crc kubenswrapper[4755]: I0202 22:44:54.977224 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx"]
Feb 02 22:44:54 crc kubenswrapper[4755]: I0202 22:44:54.977585 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx"
Feb 02 22:44:54 crc kubenswrapper[4755]: I0202 22:44:54.978001 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx"
Feb 02 22:44:54 crc kubenswrapper[4755]: I0202 22:44:54.988926 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-ppqsb"]
Feb 02 22:44:54 crc kubenswrapper[4755]: I0202 22:44:54.989047 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-ppqsb"
Feb 02 22:44:54 crc kubenswrapper[4755]: I0202 22:44:54.989481 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-ppqsb"
Feb 02 22:44:54 crc kubenswrapper[4755]: I0202 22:44:54.993984 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh"]
Feb 02 22:44:54 crc kubenswrapper[4755]: I0202 22:44:54.994395 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh"
Feb 02 22:44:55 crc kubenswrapper[4755]: I0202 22:44:54.998304 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh"
Feb 02 22:44:55 crc kubenswrapper[4755]: I0202 22:44:55.001682 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" event={"ID":"9f7076a8-349f-4ff2-9489-f4c034e17c17","Type":"ContainerStarted","Data":"be8c38a155bc73ae5998d5d0fef1262e6c602dc4238032efcfb1a9f766fcd097"}
Feb 02 22:44:55 crc kubenswrapper[4755]: I0202 22:44:55.002004 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm"
Feb 02 22:44:55 crc kubenswrapper[4755]: I0202 22:44:55.013317 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-gs2gv"]
Feb 02 22:44:55 crc kubenswrapper[4755]: I0202 22:44:55.013416 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-gs2gv"
Feb 02 22:44:55 crc kubenswrapper[4755]: I0202 22:44:55.013812 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-gs2gv"
Feb 02 22:44:55 crc kubenswrapper[4755]: E0202 22:44:55.026899 4755 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-68bc856cb9-tbnvx_openshift-operators_2c08ecd8-3885-49b8-af63-b0ffee6b10ef_0(9bcec54d85ac8ff2510e7e85742b68a2507188fea63360877b14106b80e6e5a5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Feb 02 22:44:55 crc kubenswrapper[4755]: E0202 22:44:55.026960 4755 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-68bc856cb9-tbnvx_openshift-operators_2c08ecd8-3885-49b8-af63-b0ffee6b10ef_0(9bcec54d85ac8ff2510e7e85742b68a2507188fea63360877b14106b80e6e5a5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx"
Feb 02 22:44:55 crc kubenswrapper[4755]: E0202 22:44:55.026983 4755 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-68bc856cb9-tbnvx_openshift-operators_2c08ecd8-3885-49b8-af63-b0ffee6b10ef_0(9bcec54d85ac8ff2510e7e85742b68a2507188fea63360877b14106b80e6e5a5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx"
Feb 02 22:44:55 crc kubenswrapper[4755]: E0202 22:44:55.027025 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-68bc856cb9-tbnvx_openshift-operators(2c08ecd8-3885-49b8-af63-b0ffee6b10ef)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-68bc856cb9-tbnvx_openshift-operators(2c08ecd8-3885-49b8-af63-b0ffee6b10ef)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-68bc856cb9-tbnvx_openshift-operators_2c08ecd8-3885-49b8-af63-b0ffee6b10ef_0(9bcec54d85ac8ff2510e7e85742b68a2507188fea63360877b14106b80e6e5a5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx" podUID="2c08ecd8-3885-49b8-af63-b0ffee6b10ef"
Feb 02 22:44:55 crc kubenswrapper[4755]: I0202 22:44:55.035062 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff"]
Feb 02 22:44:55 crc kubenswrapper[4755]: I0202 22:44:55.035176 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff"
Feb 02 22:44:55 crc kubenswrapper[4755]: I0202 22:44:55.035521 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff"
Feb 02 22:44:55 crc kubenswrapper[4755]: I0202 22:44:55.046389 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" podStartSLOduration=7.04637503 podStartE2EDuration="7.04637503s" podCreationTimestamp="2026-02-02 22:44:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:44:55.045365532 +0000 UTC m=+650.736585858" watchObservedRunningTime="2026-02-02 22:44:55.04637503 +0000 UTC m=+650.737595346"
Feb 02 22:44:55 crc kubenswrapper[4755]: I0202 22:44:55.060885 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm"
Feb 02 22:44:55 crc kubenswrapper[4755]: E0202 22:44:55.077144 4755 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5bf474d74f-ppqsb_openshift-operators_5a9aeb48-ea8d-4648-a311-263475a2738c_0(da2830f2f042e8b336b8b1b9f1c3b16ac7e297932a87258f927856f0540a2367): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Feb 02 22:44:55 crc kubenswrapper[4755]: E0202 22:44:55.077213 4755 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5bf474d74f-ppqsb_openshift-operators_5a9aeb48-ea8d-4648-a311-263475a2738c_0(da2830f2f042e8b336b8b1b9f1c3b16ac7e297932a87258f927856f0540a2367): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5bf474d74f-ppqsb"
Feb 02 22:44:55 crc kubenswrapper[4755]: E0202 22:44:55.077244 4755 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5bf474d74f-ppqsb_openshift-operators_5a9aeb48-ea8d-4648-a311-263475a2738c_0(da2830f2f042e8b336b8b1b9f1c3b16ac7e297932a87258f927856f0540a2367): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5bf474d74f-ppqsb"
Feb 02 22:44:55 crc kubenswrapper[4755]: E0202 22:44:55.077283 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5bf474d74f-ppqsb_openshift-operators(5a9aeb48-ea8d-4648-a311-263475a2738c)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5bf474d74f-ppqsb_openshift-operators(5a9aeb48-ea8d-4648-a311-263475a2738c)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5bf474d74f-ppqsb_openshift-operators_5a9aeb48-ea8d-4648-a311-263475a2738c_0(da2830f2f042e8b336b8b1b9f1c3b16ac7e297932a87258f927856f0540a2367): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-5bf474d74f-ppqsb" podUID="5a9aeb48-ea8d-4648-a311-263475a2738c"
Feb 02 22:44:55 crc kubenswrapper[4755]: E0202 22:44:55.081356 4755 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh_openshift-operators_71505c0d-899c-4c32-8563-8185908f8f5a_0(55b5461de2ea928b5177af28e5b1710d7bd3a384d22602c9425dbcd4d8301117): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Feb 02 22:44:55 crc kubenswrapper[4755]: E0202 22:44:55.081510 4755 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh_openshift-operators_71505c0d-899c-4c32-8563-8185908f8f5a_0(55b5461de2ea928b5177af28e5b1710d7bd3a384d22602c9425dbcd4d8301117): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh"
Feb 02 22:44:55 crc kubenswrapper[4755]: E0202 22:44:55.081534 4755 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh_openshift-operators_71505c0d-899c-4c32-8563-8185908f8f5a_0(55b5461de2ea928b5177af28e5b1710d7bd3a384d22602c9425dbcd4d8301117): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh"
Feb 02 22:44:55 crc kubenswrapper[4755]: E0202 22:44:55.081563 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh_openshift-operators(71505c0d-899c-4c32-8563-8185908f8f5a)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh_openshift-operators(71505c0d-899c-4c32-8563-8185908f8f5a)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh_openshift-operators_71505c0d-899c-4c32-8563-8185908f8f5a_0(55b5461de2ea928b5177af28e5b1710d7bd3a384d22602c9425dbcd4d8301117): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh" podUID="71505c0d-899c-4c32-8563-8185908f8f5a"
Feb 02 22:44:55 crc kubenswrapper[4755]: E0202 22:44:55.103844 4755 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-59bdc8b94-gs2gv_openshift-operators_c62fe136-2618-4f3a-b6d0-b6d35df54f4c_0(7ae491377cf8d691d0103585bc7786a22a7c8c71c0588489bda5edf139f1d294): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Feb 02 22:44:55 crc kubenswrapper[4755]: E0202 22:44:55.103913 4755 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-59bdc8b94-gs2gv_openshift-operators_c62fe136-2618-4f3a-b6d0-b6d35df54f4c_0(7ae491377cf8d691d0103585bc7786a22a7c8c71c0588489bda5edf139f1d294): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-59bdc8b94-gs2gv"
Feb 02 22:44:55 crc kubenswrapper[4755]: E0202 22:44:55.103934 4755 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-59bdc8b94-gs2gv_openshift-operators_c62fe136-2618-4f3a-b6d0-b6d35df54f4c_0(7ae491377cf8d691d0103585bc7786a22a7c8c71c0588489bda5edf139f1d294): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-59bdc8b94-gs2gv"
Feb 02 22:44:55 crc kubenswrapper[4755]: E0202 22:44:55.103981 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-59bdc8b94-gs2gv_openshift-operators(c62fe136-2618-4f3a-b6d0-b6d35df54f4c)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-59bdc8b94-gs2gv_openshift-operators(c62fe136-2618-4f3a-b6d0-b6d35df54f4c)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-59bdc8b94-gs2gv_openshift-operators_c62fe136-2618-4f3a-b6d0-b6d35df54f4c_0(7ae491377cf8d691d0103585bc7786a22a7c8c71c0588489bda5edf139f1d294): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-59bdc8b94-gs2gv" podUID="c62fe136-2618-4f3a-b6d0-b6d35df54f4c"
Feb 02 22:44:55 crc kubenswrapper[4755]: E0202 22:44:55.108765 4755 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff_openshift-operators_425aaf78-7478-49df-822c-f7108a7765bb_0(4032bf51cdc2eb4b8acf955906d9392ad215c5818e588d4ecc86e51c710c965a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Feb 02 22:44:55 crc kubenswrapper[4755]: E0202 22:44:55.108838 4755 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff_openshift-operators_425aaf78-7478-49df-822c-f7108a7765bb_0(4032bf51cdc2eb4b8acf955906d9392ad215c5818e588d4ecc86e51c710c965a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff"
Feb 02 22:44:55 crc kubenswrapper[4755]: E0202 22:44:55.108870 4755 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff_openshift-operators_425aaf78-7478-49df-822c-f7108a7765bb_0(4032bf51cdc2eb4b8acf955906d9392ad215c5818e588d4ecc86e51c710c965a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff"
Feb 02 22:44:55 crc kubenswrapper[4755]: E0202 22:44:55.108932 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff_openshift-operators(425aaf78-7478-49df-822c-f7108a7765bb)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff_openshift-operators(425aaf78-7478-49df-822c-f7108a7765bb)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff_openshift-operators_425aaf78-7478-49df-822c-f7108a7765bb_0(4032bf51cdc2eb4b8acf955906d9392ad215c5818e588d4ecc86e51c710c965a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff" podUID="425aaf78-7478-49df-822c-f7108a7765bb"
Feb 02 22:44:56 crc kubenswrapper[4755]: I0202 22:44:56.007177 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm"
Feb 02 22:44:56 crc kubenswrapper[4755]: I0202 22:44:56.007262 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm"
Feb 02 22:44:56 crc kubenswrapper[4755]: I0202 22:44:56.118791 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm"
Feb 02 22:45:00 crc kubenswrapper[4755]: I0202 22:45:00.162576 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"]
Feb 02 22:45:00 crc kubenswrapper[4755]: I0202 22:45:00.164638 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"
Feb 02 22:45:00 crc kubenswrapper[4755]: I0202 22:45:00.166781 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Feb 02 22:45:00 crc kubenswrapper[4755]: I0202 22:45:00.167130 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Feb 02 22:45:00 crc kubenswrapper[4755]: I0202 22:45:00.168082 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"]
Feb 02 22:45:00 crc kubenswrapper[4755]: I0202 22:45:00.336938 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkm8l\" (UniqueName: \"kubernetes.io/projected/6f623968-924c-4daa-acc5-9dcc77105d07-kube-api-access-zkm8l\") pod \"collect-profiles-29501205-cr5zc\" (UID: \"6f623968-924c-4daa-acc5-9dcc77105d07\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"
Feb 02 22:45:00 crc kubenswrapper[4755]: I0202 22:45:00.337036 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6f623968-924c-4daa-acc5-9dcc77105d07-config-volume\") pod \"collect-profiles-29501205-cr5zc\" (UID: \"6f623968-924c-4daa-acc5-9dcc77105d07\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"
Feb 02 22:45:00 crc kubenswrapper[4755]: I0202 22:45:00.337096 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6f623968-924c-4daa-acc5-9dcc77105d07-secret-volume\") pod \"collect-profiles-29501205-cr5zc\" (UID: \"6f623968-924c-4daa-acc5-9dcc77105d07\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"
Feb 02 22:45:00 crc kubenswrapper[4755]: I0202 22:45:00.438780 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6f623968-924c-4daa-acc5-9dcc77105d07-secret-volume\") pod \"collect-profiles-29501205-cr5zc\" (UID: \"6f623968-924c-4daa-acc5-9dcc77105d07\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"
Feb 02 22:45:00 crc kubenswrapper[4755]: I0202 22:45:00.438848 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkm8l\" (UniqueName: \"kubernetes.io/projected/6f623968-924c-4daa-acc5-9dcc77105d07-kube-api-access-zkm8l\") pod \"collect-profiles-29501205-cr5zc\" (UID: \"6f623968-924c-4daa-acc5-9dcc77105d07\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"
Feb 02 22:45:00 crc kubenswrapper[4755]: I0202 22:45:00.438891 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6f623968-924c-4daa-acc5-9dcc77105d07-config-volume\") pod \"collect-profiles-29501205-cr5zc\" (UID: \"6f623968-924c-4daa-acc5-9dcc77105d07\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"
Feb 02 22:45:00 crc kubenswrapper[4755]: I0202 22:45:00.439626 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6f623968-924c-4daa-acc5-9dcc77105d07-config-volume\") pod \"collect-profiles-29501205-cr5zc\" (UID: \"6f623968-924c-4daa-acc5-9dcc77105d07\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"
Feb 02 22:45:00 crc kubenswrapper[4755]: I0202 22:45:00.447890 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6f623968-924c-4daa-acc5-9dcc77105d07-secret-volume\") pod \"collect-profiles-29501205-cr5zc\" (UID: \"6f623968-924c-4daa-acc5-9dcc77105d07\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"
Feb 02 22:45:00 crc kubenswrapper[4755]: I0202 22:45:00.464018 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkm8l\" (UniqueName: \"kubernetes.io/projected/6f623968-924c-4daa-acc5-9dcc77105d07-kube-api-access-zkm8l\") pod \"collect-profiles-29501205-cr5zc\" (UID: \"6f623968-924c-4daa-acc5-9dcc77105d07\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"
Feb 02 22:45:00 crc kubenswrapper[4755]: I0202 22:45:00.488689 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"
Feb 02 22:45:00 crc kubenswrapper[4755]: E0202 22:45:00.531638 4755 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29501205-cr5zc_openshift-operator-lifecycle-manager_6f623968-924c-4daa-acc5-9dcc77105d07_0(b5eed22c919741b30f8de4275373107b2e56ac3c0889256a9d99820942cd54bd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Feb 02 22:45:00 crc kubenswrapper[4755]: E0202 22:45:00.531759 4755 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29501205-cr5zc_openshift-operator-lifecycle-manager_6f623968-924c-4daa-acc5-9dcc77105d07_0(b5eed22c919741b30f8de4275373107b2e56ac3c0889256a9d99820942cd54bd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"
Feb 02 22:45:00 crc kubenswrapper[4755]: E0202 22:45:00.531812 4755 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29501205-cr5zc_openshift-operator-lifecycle-manager_6f623968-924c-4daa-acc5-9dcc77105d07_0(b5eed22c919741b30f8de4275373107b2e56ac3c0889256a9d99820942cd54bd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"
Feb 02 22:45:00 crc kubenswrapper[4755]: E0202 22:45:00.531900 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"collect-profiles-29501205-cr5zc_openshift-operator-lifecycle-manager(6f623968-924c-4daa-acc5-9dcc77105d07)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"collect-profiles-29501205-cr5zc_openshift-operator-lifecycle-manager(6f623968-924c-4daa-acc5-9dcc77105d07)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29501205-cr5zc_openshift-operator-lifecycle-manager_6f623968-924c-4daa-acc5-9dcc77105d07_0(b5eed22c919741b30f8de4275373107b2e56ac3c0889256a9d99820942cd54bd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc" podUID="6f623968-924c-4daa-acc5-9dcc77105d07"
Feb 02 22:45:01 crc kubenswrapper[4755]: I0202 22:45:01.043066 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"
Feb 02 22:45:01 crc kubenswrapper[4755]: I0202 22:45:01.043633 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"
Feb 02 22:45:01 crc kubenswrapper[4755]: E0202 22:45:01.081573 4755 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29501205-cr5zc_openshift-operator-lifecycle-manager_6f623968-924c-4daa-acc5-9dcc77105d07_0(ee5a9536b952a25ef9a3c1121985339c952a3b3cf2eb4882b8754430097360e9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Feb 02 22:45:01 crc kubenswrapper[4755]: E0202 22:45:01.081634 4755 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29501205-cr5zc_openshift-operator-lifecycle-manager_6f623968-924c-4daa-acc5-9dcc77105d07_0(ee5a9536b952a25ef9a3c1121985339c952a3b3cf2eb4882b8754430097360e9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"
Feb 02 22:45:01 crc kubenswrapper[4755]: E0202 22:45:01.081665 4755 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29501205-cr5zc_openshift-operator-lifecycle-manager_6f623968-924c-4daa-acc5-9dcc77105d07_0(ee5a9536b952a25ef9a3c1121985339c952a3b3cf2eb4882b8754430097360e9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"
Feb 02 22:45:01 crc kubenswrapper[4755]: E0202 22:45:01.081718 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"collect-profiles-29501205-cr5zc_openshift-operator-lifecycle-manager(6f623968-924c-4daa-acc5-9dcc77105d07)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"collect-profiles-29501205-cr5zc_openshift-operator-lifecycle-manager(6f623968-924c-4daa-acc5-9dcc77105d07)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29501205-cr5zc_openshift-operator-lifecycle-manager_6f623968-924c-4daa-acc5-9dcc77105d07_0(ee5a9536b952a25ef9a3c1121985339c952a3b3cf2eb4882b8754430097360e9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc" podUID="6f623968-924c-4daa-acc5-9dcc77105d07"
Feb 02 22:45:02 crc kubenswrapper[4755]: I0202 22:45:02.068862 4755 scope.go:117] "RemoveContainer" containerID="0058d3561a900f271e03fea16adb2cfa9d0fe60aa9931b488e9d55c739895d14"
Feb 02 22:45:02 crc kubenswrapper[4755]: E0202 22:45:02.069175 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-5fdlw_openshift-multus(c206b6fd-200d-47ea-88a5-453f3093c749)\"" pod="openshift-multus/multus-5fdlw" podUID="c206b6fd-200d-47ea-88a5-453f3093c749"
Feb 02 22:45:06 crc kubenswrapper[4755]: I0202 22:45:06.068030 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh"
Feb 02 22:45:06 crc kubenswrapper[4755]: I0202 22:45:06.068128 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-ppqsb"
Feb 02 22:45:06 crc kubenswrapper[4755]: I0202 22:45:06.068891 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh"
Feb 02 22:45:06 crc kubenswrapper[4755]: I0202 22:45:06.069282 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-ppqsb"
Feb 02 22:45:06 crc kubenswrapper[4755]: E0202 22:45:06.106951 4755 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5bf474d74f-ppqsb_openshift-operators_5a9aeb48-ea8d-4648-a311-263475a2738c_0(035ad9d26e847fa05f97a6b639f8ab49a1ee1bc02973213bb28a224341d120dd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Feb 02 22:45:06 crc kubenswrapper[4755]: E0202 22:45:06.107018 4755 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5bf474d74f-ppqsb_openshift-operators_5a9aeb48-ea8d-4648-a311-263475a2738c_0(035ad9d26e847fa05f97a6b639f8ab49a1ee1bc02973213bb28a224341d120dd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5bf474d74f-ppqsb"
Feb 02 22:45:06 crc kubenswrapper[4755]: E0202 22:45:06.107046 4755 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5bf474d74f-ppqsb_openshift-operators_5a9aeb48-ea8d-4648-a311-263475a2738c_0(035ad9d26e847fa05f97a6b639f8ab49a1ee1bc02973213bb28a224341d120dd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5bf474d74f-ppqsb"
Feb 02 22:45:06 crc kubenswrapper[4755]: E0202 22:45:06.107105 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5bf474d74f-ppqsb_openshift-operators(5a9aeb48-ea8d-4648-a311-263475a2738c)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5bf474d74f-ppqsb_openshift-operators(5a9aeb48-ea8d-4648-a311-263475a2738c)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5bf474d74f-ppqsb_openshift-operators_5a9aeb48-ea8d-4648-a311-263475a2738c_0(035ad9d26e847fa05f97a6b639f8ab49a1ee1bc02973213bb28a224341d120dd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-5bf474d74f-ppqsb" podUID="5a9aeb48-ea8d-4648-a311-263475a2738c"
Feb 02 22:45:06 crc kubenswrapper[4755]: E0202 22:45:06.128361 4755 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh_openshift-operators_71505c0d-899c-4c32-8563-8185908f8f5a_0(ce3a2025d958c7963c5a8cceb2ccf1de7807d342d872fff1c251135857c374c2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Feb 02 22:45:06 crc kubenswrapper[4755]: E0202 22:45:06.128451 4755 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh_openshift-operators_71505c0d-899c-4c32-8563-8185908f8f5a_0(ce3a2025d958c7963c5a8cceb2ccf1de7807d342d872fff1c251135857c374c2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh"
Feb 02 22:45:06 crc kubenswrapper[4755]: E0202 22:45:06.128487 4755 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh_openshift-operators_71505c0d-899c-4c32-8563-8185908f8f5a_0(ce3a2025d958c7963c5a8cceb2ccf1de7807d342d872fff1c251135857c374c2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh"
Feb 02 22:45:06 crc kubenswrapper[4755]: E0202 22:45:06.128553 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh_openshift-operators(71505c0d-899c-4c32-8563-8185908f8f5a)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh_openshift-operators(71505c0d-899c-4c32-8563-8185908f8f5a)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh_openshift-operators_71505c0d-899c-4c32-8563-8185908f8f5a_0(ce3a2025d958c7963c5a8cceb2ccf1de7807d342d872fff1c251135857c374c2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh" podUID="71505c0d-899c-4c32-8563-8185908f8f5a"
Feb 02 22:45:08 crc kubenswrapper[4755]: I0202 22:45:08.068674 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-gs2gv"
Feb 02 22:45:08 crc kubenswrapper[4755]: I0202 22:45:08.069239 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-gs2gv"
Feb 02 22:45:08 crc kubenswrapper[4755]: E0202 22:45:08.101354 4755 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-59bdc8b94-gs2gv_openshift-operators_c62fe136-2618-4f3a-b6d0-b6d35df54f4c_0(a60e0761a0a93de219996519cf1c51bf3aa2325473e3f4197dbb512789003739): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Feb 02 22:45:08 crc kubenswrapper[4755]: E0202 22:45:08.101538 4755 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-59bdc8b94-gs2gv_openshift-operators_c62fe136-2618-4f3a-b6d0-b6d35df54f4c_0(a60e0761a0a93de219996519cf1c51bf3aa2325473e3f4197dbb512789003739): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-59bdc8b94-gs2gv"
Feb 02 22:45:08 crc kubenswrapper[4755]: E0202 22:45:08.101636 4755 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-59bdc8b94-gs2gv_openshift-operators_c62fe136-2618-4f3a-b6d0-b6d35df54f4c_0(a60e0761a0a93de219996519cf1c51bf3aa2325473e3f4197dbb512789003739): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-59bdc8b94-gs2gv"
Feb 02 22:45:08 crc kubenswrapper[4755]: E0202 22:45:08.101793 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-59bdc8b94-gs2gv_openshift-operators(c62fe136-2618-4f3a-b6d0-b6d35df54f4c)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-59bdc8b94-gs2gv_openshift-operators(c62fe136-2618-4f3a-b6d0-b6d35df54f4c)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-59bdc8b94-gs2gv_openshift-operators_c62fe136-2618-4f3a-b6d0-b6d35df54f4c_0(a60e0761a0a93de219996519cf1c51bf3aa2325473e3f4197dbb512789003739): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-59bdc8b94-gs2gv" podUID="c62fe136-2618-4f3a-b6d0-b6d35df54f4c"
Feb 02 22:45:09 crc kubenswrapper[4755]: I0202 22:45:09.068110 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff"
Feb 02 22:45:09 crc kubenswrapper[4755]: I0202 22:45:09.068634 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff"
Feb 02 22:45:09 crc kubenswrapper[4755]: E0202 22:45:09.111773 4755 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff_openshift-operators_425aaf78-7478-49df-822c-f7108a7765bb_0(5ec567d17c4f934b3e0b227ba2c355ee5a86490615c9f7b25c85e4007e046e62): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Feb 02 22:45:09 crc kubenswrapper[4755]: E0202 22:45:09.111866 4755 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff_openshift-operators_425aaf78-7478-49df-822c-f7108a7765bb_0(5ec567d17c4f934b3e0b227ba2c355ee5a86490615c9f7b25c85e4007e046e62): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff"
Feb 02 22:45:09 crc kubenswrapper[4755]: E0202 22:45:09.111901 4755 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff_openshift-operators_425aaf78-7478-49df-822c-f7108a7765bb_0(5ec567d17c4f934b3e0b227ba2c355ee5a86490615c9f7b25c85e4007e046e62): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff"
Feb 02 22:45:09 crc kubenswrapper[4755]: E0202 22:45:09.111997 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff_openshift-operators(425aaf78-7478-49df-822c-f7108a7765bb)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff_openshift-operators(425aaf78-7478-49df-822c-f7108a7765bb)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff_openshift-operators_425aaf78-7478-49df-822c-f7108a7765bb_0(5ec567d17c4f934b3e0b227ba2c355ee5a86490615c9f7b25c85e4007e046e62): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff" podUID="425aaf78-7478-49df-822c-f7108a7765bb"
Feb 02 22:45:10 crc kubenswrapper[4755]: I0202 22:45:10.068473 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx"
Feb 02 22:45:10 crc kubenswrapper[4755]: I0202 22:45:10.069255 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx"
Feb 02 22:45:10 crc kubenswrapper[4755]: E0202 22:45:10.104072 4755 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-68bc856cb9-tbnvx_openshift-operators_2c08ecd8-3885-49b8-af63-b0ffee6b10ef_0(8822b903782045d2f7ad7212b9fdffe34390f84c808c147bdca85954abaecc9e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Feb 02 22:45:10 crc kubenswrapper[4755]: E0202 22:45:10.104240 4755 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-68bc856cb9-tbnvx_openshift-operators_2c08ecd8-3885-49b8-af63-b0ffee6b10ef_0(8822b903782045d2f7ad7212b9fdffe34390f84c808c147bdca85954abaecc9e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx"
Feb 02 22:45:10 crc kubenswrapper[4755]: E0202 22:45:10.104313 4755 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-68bc856cb9-tbnvx_openshift-operators_2c08ecd8-3885-49b8-af63-b0ffee6b10ef_0(8822b903782045d2f7ad7212b9fdffe34390f84c808c147bdca85954abaecc9e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx"
Feb 02 22:45:10 crc kubenswrapper[4755]: E0202 22:45:10.104408 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-68bc856cb9-tbnvx_openshift-operators(2c08ecd8-3885-49b8-af63-b0ffee6b10ef)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-68bc856cb9-tbnvx_openshift-operators(2c08ecd8-3885-49b8-af63-b0ffee6b10ef)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-68bc856cb9-tbnvx_openshift-operators_2c08ecd8-3885-49b8-af63-b0ffee6b10ef_0(8822b903782045d2f7ad7212b9fdffe34390f84c808c147bdca85954abaecc9e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx" podUID="2c08ecd8-3885-49b8-af63-b0ffee6b10ef"
Feb 02 22:45:12 crc kubenswrapper[4755]: I0202 22:45:12.068779 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"
Feb 02 22:45:12 crc kubenswrapper[4755]: I0202 22:45:12.069236 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"
Feb 02 22:45:12 crc kubenswrapper[4755]: E0202 22:45:12.132340 4755 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29501205-cr5zc_openshift-operator-lifecycle-manager_6f623968-924c-4daa-acc5-9dcc77105d07_0(8cb83a58b66d1dd2c3351208424046dd05ad92c1a27ba3af2da1f8b08c1aa53d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Feb 02 22:45:12 crc kubenswrapper[4755]: E0202 22:45:12.132441 4755 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29501205-cr5zc_openshift-operator-lifecycle-manager_6f623968-924c-4daa-acc5-9dcc77105d07_0(8cb83a58b66d1dd2c3351208424046dd05ad92c1a27ba3af2da1f8b08c1aa53d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc" Feb 02 22:45:12 crc kubenswrapper[4755]: E0202 22:45:12.132483 4755 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29501205-cr5zc_openshift-operator-lifecycle-manager_6f623968-924c-4daa-acc5-9dcc77105d07_0(8cb83a58b66d1dd2c3351208424046dd05ad92c1a27ba3af2da1f8b08c1aa53d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc" Feb 02 22:45:12 crc kubenswrapper[4755]: E0202 22:45:12.132559 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"collect-profiles-29501205-cr5zc_openshift-operator-lifecycle-manager(6f623968-924c-4daa-acc5-9dcc77105d07)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"collect-profiles-29501205-cr5zc_openshift-operator-lifecycle-manager(6f623968-924c-4daa-acc5-9dcc77105d07)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29501205-cr5zc_openshift-operator-lifecycle-manager_6f623968-924c-4daa-acc5-9dcc77105d07_0(8cb83a58b66d1dd2c3351208424046dd05ad92c1a27ba3af2da1f8b08c1aa53d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc" podUID="6f623968-924c-4daa-acc5-9dcc77105d07" Feb 02 22:45:15 crc kubenswrapper[4755]: I0202 22:45:15.074371 4755 scope.go:117] "RemoveContainer" containerID="0058d3561a900f271e03fea16adb2cfa9d0fe60aa9931b488e9d55c739895d14" Feb 02 22:45:16 crc kubenswrapper[4755]: I0202 22:45:16.149512 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5fdlw_c206b6fd-200d-47ea-88a5-453f3093c749/kube-multus/2.log" Feb 02 22:45:16 crc kubenswrapper[4755]: I0202 22:45:16.149617 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5fdlw" event={"ID":"c206b6fd-200d-47ea-88a5-453f3093c749","Type":"ContainerStarted","Data":"378acec807b39b1c4765c70c4fa1a0318b92f89eaa937b2e583b5578daaf16f3"} Feb 02 22:45:17 crc kubenswrapper[4755]: I0202 22:45:17.068095 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh" Feb 02 22:45:17 crc kubenswrapper[4755]: I0202 22:45:17.068824 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh" Feb 02 22:45:17 crc kubenswrapper[4755]: I0202 22:45:17.323257 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh"] Feb 02 22:45:18 crc kubenswrapper[4755]: I0202 22:45:18.166026 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh" event={"ID":"71505c0d-899c-4c32-8563-8185908f8f5a","Type":"ContainerStarted","Data":"a0ef30ffa2807858979832c686e3caedda6977e9bdff6e6182579219845e810a"} Feb 02 22:45:18 crc kubenswrapper[4755]: I0202 22:45:18.516434 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mpvkm" Feb 02 22:45:20 crc kubenswrapper[4755]: I0202 22:45:20.068435 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-ppqsb" Feb 02 22:45:20 crc kubenswrapper[4755]: I0202 22:45:20.069036 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-ppqsb" Feb 02 22:45:21 crc kubenswrapper[4755]: I0202 22:45:21.555843 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-ppqsb"] Feb 02 22:45:21 crc kubenswrapper[4755]: W0202 22:45:21.564577 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5a9aeb48_ea8d_4648_a311_263475a2738c.slice/crio-642c3ee3e7e82ddc0b44ffed57a8f3dbb3dc1c9efb4b99063d30a56ab315b2c0 WatchSource:0}: Error finding container 642c3ee3e7e82ddc0b44ffed57a8f3dbb3dc1c9efb4b99063d30a56ab315b2c0: Status 404 returned error can't find the container with id 642c3ee3e7e82ddc0b44ffed57a8f3dbb3dc1c9efb4b99063d30a56ab315b2c0 Feb 02 22:45:22 crc kubenswrapper[4755]: I0202 22:45:22.188285 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh" event={"ID":"71505c0d-899c-4c32-8563-8185908f8f5a","Type":"ContainerStarted","Data":"257f1b84b6c2ea87229912f3bf8d4ef2bc0c57ab72c30dba30891db95afde65f"} Feb 02 22:45:22 crc kubenswrapper[4755]: I0202 22:45:22.189114 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-ppqsb" event={"ID":"5a9aeb48-ea8d-4648-a311-263475a2738c","Type":"ContainerStarted","Data":"642c3ee3e7e82ddc0b44ffed57a8f3dbb3dc1c9efb4b99063d30a56ab315b2c0"} Feb 02 22:45:22 crc kubenswrapper[4755]: I0202 22:45:22.207903 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh" podStartSLOduration=28.302611596 podStartE2EDuration="32.207880652s" podCreationTimestamp="2026-02-02 22:44:50 +0000 UTC" firstStartedPulling="2026-02-02 22:45:17.341128626 +0000 UTC m=+673.032348962" lastFinishedPulling="2026-02-02 22:45:21.246397682 +0000 UTC m=+676.937618018" observedRunningTime="2026-02-02 22:45:22.202054629 +0000 UTC m=+677.893274985" watchObservedRunningTime="2026-02-02 22:45:22.207880652 +0000 UTC m=+677.899100998" Feb 02 22:45:23 crc kubenswrapper[4755]: I0202 22:45:23.068922 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff" Feb 02 22:45:23 crc kubenswrapper[4755]: I0202 22:45:23.068929 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-gs2gv" Feb 02 22:45:23 crc kubenswrapper[4755]: I0202 22:45:23.069531 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-gs2gv" Feb 02 22:45:23 crc kubenswrapper[4755]: I0202 22:45:23.069906 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff" Feb 02 22:45:23 crc kubenswrapper[4755]: I0202 22:45:23.295401 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-gs2gv"] Feb 02 22:45:23 crc kubenswrapper[4755]: I0202 22:45:23.331702 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff"] Feb 02 22:45:23 crc kubenswrapper[4755]: W0202 22:45:23.626044 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod425aaf78_7478_49df_822c_f7108a7765bb.slice/crio-4098162ddffb3ac8fee5200f7e2ddf05c36a17c871eb75e5084dd4a014279ea1 WatchSource:0}: Error finding container 4098162ddffb3ac8fee5200f7e2ddf05c36a17c871eb75e5084dd4a014279ea1: Status 404 returned error can't find the container with id 4098162ddffb3ac8fee5200f7e2ddf05c36a17c871eb75e5084dd4a014279ea1 Feb 02 22:45:24 crc kubenswrapper[4755]: I0202 22:45:24.200043 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-gs2gv" event={"ID":"c62fe136-2618-4f3a-b6d0-b6d35df54f4c","Type":"ContainerStarted","Data":"0b0508a593721dc8f297a9dd28da641687f1c738bfbb28569ebb1b5a3721dabd"} Feb 02 22:45:24 crc kubenswrapper[4755]: I0202 22:45:24.202091 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-ppqsb" event={"ID":"5a9aeb48-ea8d-4648-a311-263475a2738c","Type":"ContainerStarted","Data":"f3b472fde7eb4484b253794a51bb99c096e54ceb2927d914aea728066dc547f8"} Feb 02 22:45:24 crc kubenswrapper[4755]: I0202 22:45:24.202172 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5bf474d74f-ppqsb" Feb 02 22:45:24 crc kubenswrapper[4755]: I0202 22:45:24.207000 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff" event={"ID":"425aaf78-7478-49df-822c-f7108a7765bb","Type":"ContainerStarted","Data":"b78cb1dd6b1b24acc87d153f02f1c30a3fa0f4d491ecb68941024b1666caa567"} Feb 02 22:45:24 crc kubenswrapper[4755]: I0202 22:45:24.207039 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff" event={"ID":"425aaf78-7478-49df-822c-f7108a7765bb","Type":"ContainerStarted","Data":"4098162ddffb3ac8fee5200f7e2ddf05c36a17c871eb75e5084dd4a014279ea1"} Feb 02 22:45:24 crc kubenswrapper[4755]: I0202 22:45:24.222866 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5bf474d74f-ppqsb" podStartSLOduration=31.06974479 podStartE2EDuration="33.222850734s" podCreationTimestamp="2026-02-02 22:44:51 +0000 UTC" firstStartedPulling="2026-02-02 22:45:21.567913568 +0000 UTC m=+677.259133894" lastFinishedPulling="2026-02-02 22:45:23.721019512 +0000 UTC m=+679.412239838" observedRunningTime="2026-02-02 22:45:24.222547915 +0000 UTC m=+679.913768251" watchObservedRunningTime="2026-02-02 22:45:24.222850734 +0000 UTC m=+679.914071060" Feb 02 22:45:24 crc kubenswrapper[4755]: I0202 22:45:24.246825 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff" podStartSLOduration=34.246800066 podStartE2EDuration="34.246800066s" 
podCreationTimestamp="2026-02-02 22:44:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:45:24.236664841 +0000 UTC m=+679.927885197" watchObservedRunningTime="2026-02-02 22:45:24.246800066 +0000 UTC m=+679.938020392" Feb 02 22:45:25 crc kubenswrapper[4755]: I0202 22:45:25.068237 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx" Feb 02 22:45:25 crc kubenswrapper[4755]: I0202 22:45:25.071567 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx" Feb 02 22:45:25 crc kubenswrapper[4755]: I0202 22:45:25.275434 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx"] Feb 02 22:45:25 crc kubenswrapper[4755]: W0202 22:45:25.281644 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2c08ecd8_3885_49b8_af63_b0ffee6b10ef.slice/crio-51bd56a402294fb77e8009eb001141db7a409f02781055d6d32cd56a10a05d29 WatchSource:0}: Error finding container 51bd56a402294fb77e8009eb001141db7a409f02781055d6d32cd56a10a05d29: Status 404 returned error can't find the container with id 51bd56a402294fb77e8009eb001141db7a409f02781055d6d32cd56a10a05d29 Feb 02 22:45:26 crc kubenswrapper[4755]: I0202 22:45:26.068912 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc" Feb 02 22:45:26 crc kubenswrapper[4755]: I0202 22:45:26.069645 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc" Feb 02 22:45:26 crc kubenswrapper[4755]: I0202 22:45:26.232643 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx" event={"ID":"2c08ecd8-3885-49b8-af63-b0ffee6b10ef","Type":"ContainerStarted","Data":"51bd56a402294fb77e8009eb001141db7a409f02781055d6d32cd56a10a05d29"} Feb 02 22:45:26 crc kubenswrapper[4755]: I0202 22:45:26.284889 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"] Feb 02 22:45:26 crc kubenswrapper[4755]: W0202 22:45:26.302872 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f623968_924c_4daa_acc5_9dcc77105d07.slice/crio-3de7708328a9bfcb50fe4bb1f46e215c5cd3a2d0de32458fa0db81cf9045d86f WatchSource:0}: Error finding container 3de7708328a9bfcb50fe4bb1f46e215c5cd3a2d0de32458fa0db81cf9045d86f: Status 404 returned error can't find the container with id 3de7708328a9bfcb50fe4bb1f46e215c5cd3a2d0de32458fa0db81cf9045d86f Feb 02 22:45:27 crc kubenswrapper[4755]: I0202 22:45:27.249937 4755 generic.go:334] "Generic (PLEG): container finished" podID="6f623968-924c-4daa-acc5-9dcc77105d07" containerID="18facd827b868c3e5eafa2e3ad15d359b5152ca0738a65615eaf7a3707e8a709" exitCode=0 Feb 02 22:45:27 crc kubenswrapper[4755]: I0202 22:45:27.250073 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc" event={"ID":"6f623968-924c-4daa-acc5-9dcc77105d07","Type":"ContainerDied","Data":"18facd827b868c3e5eafa2e3ad15d359b5152ca0738a65615eaf7a3707e8a709"} Feb 02 22:45:27 crc kubenswrapper[4755]: I0202 22:45:27.250199 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc" event={"ID":"6f623968-924c-4daa-acc5-9dcc77105d07","Type":"ContainerStarted","Data":"3de7708328a9bfcb50fe4bb1f46e215c5cd3a2d0de32458fa0db81cf9045d86f"} Feb 02 22:45:29 crc kubenswrapper[4755]: I0202 22:45:29.553107 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc" Feb 02 22:45:29 crc kubenswrapper[4755]: I0202 22:45:29.558684 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6f623968-924c-4daa-acc5-9dcc77105d07-config-volume\") pod \"6f623968-924c-4daa-acc5-9dcc77105d07\" (UID: \"6f623968-924c-4daa-acc5-9dcc77105d07\") " Feb 02 22:45:29 crc kubenswrapper[4755]: I0202 22:45:29.558910 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkm8l\" (UniqueName: \"kubernetes.io/projected/6f623968-924c-4daa-acc5-9dcc77105d07-kube-api-access-zkm8l\") pod \"6f623968-924c-4daa-acc5-9dcc77105d07\" (UID: \"6f623968-924c-4daa-acc5-9dcc77105d07\") " Feb 02 22:45:29 crc kubenswrapper[4755]: I0202 22:45:29.558964 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6f623968-924c-4daa-acc5-9dcc77105d07-secret-volume\") pod \"6f623968-924c-4daa-acc5-9dcc77105d07\" (UID: \"6f623968-924c-4daa-acc5-9dcc77105d07\") " Feb 02 22:45:29 crc kubenswrapper[4755]: I0202 22:45:29.559446 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f623968-924c-4daa-acc5-9dcc77105d07-config-volume" (OuterVolumeSpecName: "config-volume") pod "6f623968-924c-4daa-acc5-9dcc77105d07" (UID: "6f623968-924c-4daa-acc5-9dcc77105d07"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:45:29 crc kubenswrapper[4755]: I0202 22:45:29.594845 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f623968-924c-4daa-acc5-9dcc77105d07-kube-api-access-zkm8l" (OuterVolumeSpecName: "kube-api-access-zkm8l") pod "6f623968-924c-4daa-acc5-9dcc77105d07" (UID: "6f623968-924c-4daa-acc5-9dcc77105d07"). InnerVolumeSpecName "kube-api-access-zkm8l". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:45:29 crc kubenswrapper[4755]: I0202 22:45:29.595419 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f623968-924c-4daa-acc5-9dcc77105d07-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6f623968-924c-4daa-acc5-9dcc77105d07" (UID: "6f623968-924c-4daa-acc5-9dcc77105d07"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:45:29 crc kubenswrapper[4755]: I0202 22:45:29.660011 4755 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6f623968-924c-4daa-acc5-9dcc77105d07-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 22:45:29 crc kubenswrapper[4755]: I0202 22:45:29.660061 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkm8l\" (UniqueName: \"kubernetes.io/projected/6f623968-924c-4daa-acc5-9dcc77105d07-kube-api-access-zkm8l\") on node \"crc\" DevicePath \"\"" Feb 02 22:45:29 crc kubenswrapper[4755]: I0202 22:45:29.660075 4755 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6f623968-924c-4daa-acc5-9dcc77105d07-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 02 22:45:30 crc kubenswrapper[4755]: I0202 22:45:30.485668 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc" event={"ID":"6f623968-924c-4daa-acc5-9dcc77105d07","Type":"ContainerDied","Data":"3de7708328a9bfcb50fe4bb1f46e215c5cd3a2d0de32458fa0db81cf9045d86f"} Feb 02 22:45:30 crc kubenswrapper[4755]: I0202 22:45:30.485953 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3de7708328a9bfcb50fe4bb1f46e215c5cd3a2d0de32458fa0db81cf9045d86f" Feb 02 22:45:30 crc kubenswrapper[4755]: I0202 22:45:30.486031 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc" Feb 02 22:45:31 crc kubenswrapper[4755]: I0202 22:45:31.468725 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5bf474d74f-ppqsb" Feb 02 22:45:31 crc kubenswrapper[4755]: I0202 22:45:31.496282 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-gs2gv" event={"ID":"c62fe136-2618-4f3a-b6d0-b6d35df54f4c","Type":"ContainerStarted","Data":"5805ffd8c3a91011d3beb20fbaed2313d31cc9f0732ed37e5f90e163f97cc761"} Feb 02 22:45:31 crc kubenswrapper[4755]: I0202 22:45:31.496594 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-59bdc8b94-gs2gv" Feb 02 22:45:31 crc kubenswrapper[4755]: I0202 22:45:31.498757 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx" event={"ID":"2c08ecd8-3885-49b8-af63-b0ffee6b10ef","Type":"ContainerStarted","Data":"094239c4e3d10fa64c3e5dcbf8477a6e638df054cfb5e8c32f01e7ae6f556781"} Feb 02 22:45:31 crc kubenswrapper[4755]: I0202 22:45:31.527599 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-59bdc8b94-gs2gv" podStartSLOduration=33.767332082 podStartE2EDuration="40.527575432s" podCreationTimestamp="2026-02-02 22:44:51 +0000 UTC" firstStartedPulling="2026-02-02 22:45:23.632455779 +0000 UTC m=+679.323676115" lastFinishedPulling="2026-02-02 22:45:30.392699129 +0000 UTC m=+686.083919465" observedRunningTime="2026-02-02 22:45:31.524865306 +0000 UTC m=+687.216085682" watchObservedRunningTime="2026-02-02 22:45:31.527575432 +0000 UTC m=+687.218795788" Feb 02 22:45:31 crc kubenswrapper[4755]: I0202 22:45:31.560129 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-operators/obo-prometheus-operator-68bc856cb9-tbnvx" podStartSLOduration=36.466529348 podStartE2EDuration="41.560104264s" podCreationTimestamp="2026-02-02 22:44:50 +0000 UTC" firstStartedPulling="2026-02-02 22:45:25.283983339 +0000 UTC m=+680.975203665" lastFinishedPulling="2026-02-02 22:45:30.377558245 +0000 UTC m=+686.068778581" observedRunningTime="2026-02-02 22:45:31.553938641 +0000 UTC m=+687.245159007" watchObservedRunningTime="2026-02-02 22:45:31.560104264 +0000 UTC m=+687.251324630" Feb 02 22:45:31 crc kubenswrapper[4755]: I0202 22:45:31.570828 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-59bdc8b94-gs2gv" Feb 02 22:45:39 crc kubenswrapper[4755]: I0202 22:45:39.867614 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-pv6b4"] Feb 02 22:45:39 crc kubenswrapper[4755]: E0202 22:45:39.868584 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f623968-924c-4daa-acc5-9dcc77105d07" containerName="collect-profiles" Feb 02 22:45:39 crc kubenswrapper[4755]: I0202 22:45:39.868600 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f623968-924c-4daa-acc5-9dcc77105d07" containerName="collect-profiles" Feb 02 22:45:39 crc kubenswrapper[4755]: I0202 22:45:39.868850 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f623968-924c-4daa-acc5-9dcc77105d07" containerName="collect-profiles" Feb 02 22:45:39 crc kubenswrapper[4755]: I0202 22:45:39.869494 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-pv6b4" Feb 02 22:45:39 crc kubenswrapper[4755]: I0202 22:45:39.878594 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Feb 02 22:45:39 crc kubenswrapper[4755]: I0202 22:45:39.879365 4755 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-k6htl" Feb 02 22:45:39 crc kubenswrapper[4755]: I0202 22:45:39.886623 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Feb 02 22:45:39 crc kubenswrapper[4755]: I0202 22:45:39.891281 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-pv6b4"] Feb 02 22:45:39 crc kubenswrapper[4755]: I0202 22:45:39.918449 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-858654f9db-pr5s4"] Feb 02 22:45:39 crc kubenswrapper[4755]: I0202 22:45:39.919219 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-pr5s4" Feb 02 22:45:39 crc kubenswrapper[4755]: I0202 22:45:39.924458 4755 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-lrj5j" Feb 02 22:45:39 crc kubenswrapper[4755]: I0202 22:45:39.941457 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-mz5vq"] Feb 02 22:45:39 crc kubenswrapper[4755]: I0202 22:45:39.942429 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-mz5vq" Feb 02 22:45:39 crc kubenswrapper[4755]: I0202 22:45:39.943956 4755 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-vngbq" Feb 02 22:45:39 crc kubenswrapper[4755]: I0202 22:45:39.945417 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-pr5s4"] Feb 02 22:45:39 crc kubenswrapper[4755]: I0202 22:45:39.951442 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-mz5vq"] Feb 02 22:45:40 crc kubenswrapper[4755]: I0202 22:45:40.006159 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rztzq\" (UniqueName: \"kubernetes.io/projected/6fa294d0-f7ce-49cf-8817-241cb21c6778-kube-api-access-rztzq\") pod \"cert-manager-cainjector-cf98fcc89-pv6b4\" (UID: \"6fa294d0-f7ce-49cf-8817-241cb21c6778\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-pv6b4" Feb 02 22:45:40 crc kubenswrapper[4755]: I0202 22:45:40.006218 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvwqs\" (UniqueName: \"kubernetes.io/projected/727c74a2-1ffc-4659-8f00-95e14c4a266a-kube-api-access-wvwqs\") pod \"cert-manager-858654f9db-pr5s4\" (UID: \"727c74a2-1ffc-4659-8f00-95e14c4a266a\") " pod="cert-manager/cert-manager-858654f9db-pr5s4" Feb 02 22:45:40 crc kubenswrapper[4755]: I0202 22:45:40.107752 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b88c8\" (UniqueName: \"kubernetes.io/projected/fc6bced4-adaf-4d90-b8ac-25c7e5fbe33e-kube-api-access-b88c8\") pod \"cert-manager-webhook-687f57d79b-mz5vq\" (UID: \"fc6bced4-adaf-4d90-b8ac-25c7e5fbe33e\") " pod="cert-manager/cert-manager-webhook-687f57d79b-mz5vq" Feb 02 22:45:40 crc kubenswrapper[4755]: I0202 22:45:40.107812 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rztzq\" (UniqueName: \"kubernetes.io/projected/6fa294d0-f7ce-49cf-8817-241cb21c6778-kube-api-access-rztzq\") pod \"cert-manager-cainjector-cf98fcc89-pv6b4\" (UID: \"6fa294d0-f7ce-49cf-8817-241cb21c6778\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-pv6b4" Feb 02 22:45:40 crc kubenswrapper[4755]: I0202 22:45:40.107845 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvwqs\" (UniqueName: \"kubernetes.io/projected/727c74a2-1ffc-4659-8f00-95e14c4a266a-kube-api-access-wvwqs\") pod \"cert-manager-858654f9db-pr5s4\" (UID: \"727c74a2-1ffc-4659-8f00-95e14c4a266a\") " pod="cert-manager/cert-manager-858654f9db-pr5s4" Feb 02 22:45:40 crc kubenswrapper[4755]: I0202 22:45:40.130691 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rztzq\" (UniqueName: \"kubernetes.io/projected/6fa294d0-f7ce-49cf-8817-241cb21c6778-kube-api-access-rztzq\") pod \"cert-manager-cainjector-cf98fcc89-pv6b4\" (UID: \"6fa294d0-f7ce-49cf-8817-241cb21c6778\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-pv6b4" Feb 02 22:45:40 crc kubenswrapper[4755]: I0202 22:45:40.143666 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvwqs\" (UniqueName: \"kubernetes.io/projected/727c74a2-1ffc-4659-8f00-95e14c4a266a-kube-api-access-wvwqs\") pod \"cert-manager-858654f9db-pr5s4\" (UID: \"727c74a2-1ffc-4659-8f00-95e14c4a266a\") " 
pod="cert-manager/cert-manager-858654f9db-pr5s4" Feb 02 22:45:40 crc kubenswrapper[4755]: I0202 22:45:40.198049 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-pv6b4" Feb 02 22:45:40 crc kubenswrapper[4755]: I0202 22:45:40.208538 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b88c8\" (UniqueName: \"kubernetes.io/projected/fc6bced4-adaf-4d90-b8ac-25c7e5fbe33e-kube-api-access-b88c8\") pod \"cert-manager-webhook-687f57d79b-mz5vq\" (UID: \"fc6bced4-adaf-4d90-b8ac-25c7e5fbe33e\") " pod="cert-manager/cert-manager-webhook-687f57d79b-mz5vq" Feb 02 22:45:40 crc kubenswrapper[4755]: I0202 22:45:40.223251 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b88c8\" (UniqueName: \"kubernetes.io/projected/fc6bced4-adaf-4d90-b8ac-25c7e5fbe33e-kube-api-access-b88c8\") pod \"cert-manager-webhook-687f57d79b-mz5vq\" (UID: \"fc6bced4-adaf-4d90-b8ac-25c7e5fbe33e\") " pod="cert-manager/cert-manager-webhook-687f57d79b-mz5vq" Feb 02 22:45:40 crc kubenswrapper[4755]: I0202 22:45:40.233852 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-pr5s4" Feb 02 22:45:40 crc kubenswrapper[4755]: I0202 22:45:40.253791 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-mz5vq" Feb 02 22:45:40 crc kubenswrapper[4755]: I0202 22:45:40.439249 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-pv6b4"] Feb 02 22:45:40 crc kubenswrapper[4755]: W0202 22:45:40.454875 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6fa294d0_f7ce_49cf_8817_241cb21c6778.slice/crio-f6ced5dca59b9c7dcd18ab2903ed3c0b52013a009d4d1f42d2bfbc89047b35d8 WatchSource:0}: Error finding container f6ced5dca59b9c7dcd18ab2903ed3c0b52013a009d4d1f42d2bfbc89047b35d8: Status 404 returned error can't find the container with id f6ced5dca59b9c7dcd18ab2903ed3c0b52013a009d4d1f42d2bfbc89047b35d8 Feb 02 22:45:40 crc kubenswrapper[4755]: I0202 22:45:40.550397 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-pr5s4"] Feb 02 22:45:40 crc kubenswrapper[4755]: I0202 22:45:40.550444 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-pv6b4" event={"ID":"6fa294d0-f7ce-49cf-8817-241cb21c6778","Type":"ContainerStarted","Data":"f6ced5dca59b9c7dcd18ab2903ed3c0b52013a009d4d1f42d2bfbc89047b35d8"} Feb 02 22:45:40 crc kubenswrapper[4755]: W0202 22:45:40.552868 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod727c74a2_1ffc_4659_8f00_95e14c4a266a.slice/crio-0edcb0097685ff4badbc0db37b870950d99dcc0f8be2990823f503e12eb90027 WatchSource:0}: Error finding container 0edcb0097685ff4badbc0db37b870950d99dcc0f8be2990823f503e12eb90027: Status 404 returned error can't find the container with id 0edcb0097685ff4badbc0db37b870950d99dcc0f8be2990823f503e12eb90027 Feb 02 22:45:40 crc kubenswrapper[4755]: I0202 22:45:40.701407 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-mz5vq"] Feb 02 22:45:40 crc kubenswrapper[4755]: W0202 22:45:40.707634 4755 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc6bced4_adaf_4d90_b8ac_25c7e5fbe33e.slice/crio-cf82b10f0e878fae05db725e00625d7d2a8648a9d75494b28bbd861dfcf29c01 WatchSource:0}: Error finding container cf82b10f0e878fae05db725e00625d7d2a8648a9d75494b28bbd861dfcf29c01: Status 404 returned error can't find the container with id cf82b10f0e878fae05db725e00625d7d2a8648a9d75494b28bbd861dfcf29c01 Feb 02 22:45:41 crc kubenswrapper[4755]: I0202 22:45:41.555699 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-pr5s4" event={"ID":"727c74a2-1ffc-4659-8f00-95e14c4a266a","Type":"ContainerStarted","Data":"0edcb0097685ff4badbc0db37b870950d99dcc0f8be2990823f503e12eb90027"} Feb 02 22:45:41 crc kubenswrapper[4755]: I0202 22:45:41.556683 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-mz5vq" event={"ID":"fc6bced4-adaf-4d90-b8ac-25c7e5fbe33e","Type":"ContainerStarted","Data":"cf82b10f0e878fae05db725e00625d7d2a8648a9d75494b28bbd861dfcf29c01"} Feb 02 22:45:43 crc kubenswrapper[4755]: I0202 22:45:43.569169 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-pv6b4" event={"ID":"6fa294d0-f7ce-49cf-8817-241cb21c6778","Type":"ContainerStarted","Data":"3106f74e8d9ad86a26fc80d743ea52c90800b8f910c23d75a895ff24adf706c6"} Feb 02 22:45:43 crc kubenswrapper[4755]: I0202 22:45:43.583376 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-cf98fcc89-pv6b4" podStartSLOduration=2.16145102 podStartE2EDuration="4.583360802s" podCreationTimestamp="2026-02-02 22:45:39 +0000 UTC" firstStartedPulling="2026-02-02 22:45:40.461975726 +0000 UTC m=+696.153196052" lastFinishedPulling="2026-02-02 22:45:42.883885508 +0000 UTC m=+698.575105834" observedRunningTime="2026-02-02 22:45:43.582453996 +0000 UTC m=+699.273674322" watchObservedRunningTime="2026-02-02 22:45:43.583360802 +0000 UTC m=+699.274581128" Feb 02 22:45:45 crc kubenswrapper[4755]: I0202 22:45:45.583579 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-mz5vq" event={"ID":"fc6bced4-adaf-4d90-b8ac-25c7e5fbe33e","Type":"ContainerStarted","Data":"7e649ce6a47f97680dff105d6a8e77d0ee7e203fafae561a24cd83eafacbc831"} Feb 02 22:45:45 crc kubenswrapper[4755]: I0202 22:45:45.584243 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-687f57d79b-mz5vq" Feb 02 22:45:45 crc kubenswrapper[4755]: I0202 22:45:45.585564 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-pr5s4" event={"ID":"727c74a2-1ffc-4659-8f00-95e14c4a266a","Type":"ContainerStarted","Data":"9e945d67d91d874ac461c2f8ed200147c8331dd13b9e11e5d062457535b86424"} Feb 02 22:45:45 crc kubenswrapper[4755]: I0202 22:45:45.605854 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-687f57d79b-mz5vq" podStartSLOduration=2.301826505 podStartE2EDuration="6.605829432s" podCreationTimestamp="2026-02-02 22:45:39 +0000 UTC" firstStartedPulling="2026-02-02 22:45:40.70963418 +0000 UTC m=+696.400854526" lastFinishedPulling="2026-02-02 22:45:45.013637107 +0000 UTC m=+700.704857453" observedRunningTime="2026-02-02 22:45:45.599402082 +0000 UTC m=+701.290622418" watchObservedRunningTime="2026-02-02 22:45:45.605829432 +0000 UTC m=+701.297049788" Feb 02 22:45:45 crc kubenswrapper[4755]: I0202 22:45:45.657175 
4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-858654f9db-pr5s4" podStartSLOduration=2.207322705 podStartE2EDuration="6.657157001s" podCreationTimestamp="2026-02-02 22:45:39 +0000 UTC" firstStartedPulling="2026-02-02 22:45:40.556500636 +0000 UTC m=+696.247720962" lastFinishedPulling="2026-02-02 22:45:45.006334922 +0000 UTC m=+700.697555258" observedRunningTime="2026-02-02 22:45:45.654083155 +0000 UTC m=+701.345303481" watchObservedRunningTime="2026-02-02 22:45:45.657157001 +0000 UTC m=+701.348377327" Feb 02 22:45:50 crc kubenswrapper[4755]: I0202 22:45:50.257120 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-687f57d79b-mz5vq" Feb 02 22:45:53 crc kubenswrapper[4755]: I0202 22:45:53.389311 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 22:45:53 crc kubenswrapper[4755]: I0202 22:45:53.389665 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 22:46:05 crc kubenswrapper[4755]: I0202 22:46:05.483083 4755 scope.go:117] "RemoveContainer" containerID="ce31c674f686ef143bc7f9daf3a69e2098b613b90096e0fb863cbe306502dcd3" Feb 02 22:46:05 crc kubenswrapper[4755]: I0202 22:46:05.513340 4755 scope.go:117] "RemoveContainer" containerID="6ec3142e7d7f96b1eac97856580859ffdfdec03e2e4b14689d0d2f2b2dd6a891" Feb 02 22:46:14 crc kubenswrapper[4755]: I0202 22:46:14.222541 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk"] Feb 02 22:46:14 crc kubenswrapper[4755]: I0202 22:46:14.224208 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk" Feb 02 22:46:14 crc kubenswrapper[4755]: I0202 22:46:14.226188 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Feb 02 22:46:14 crc kubenswrapper[4755]: I0202 22:46:14.234552 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk"] Feb 02 22:46:14 crc kubenswrapper[4755]: I0202 22:46:14.362275 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/de93e49c-6575-481e-85e1-546d78192cc1-bundle\") pod \"3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk\" (UID: \"de93e49c-6575-481e-85e1-546d78192cc1\") " pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk" Feb 02 22:46:14 crc kubenswrapper[4755]: I0202 22:46:14.364309 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qp8zb\" (UniqueName: \"kubernetes.io/projected/de93e49c-6575-481e-85e1-546d78192cc1-kube-api-access-qp8zb\") pod \"3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk\" (UID: \"de93e49c-6575-481e-85e1-546d78192cc1\") " pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk" Feb 02 22:46:14 crc kubenswrapper[4755]: I0202 22:46:14.364532 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/de93e49c-6575-481e-85e1-546d78192cc1-util\") pod \"3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk\" (UID: \"de93e49c-6575-481e-85e1-546d78192cc1\") " pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk" Feb 02 22:46:14 crc kubenswrapper[4755]: I0202 22:46:14.466099 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/de93e49c-6575-481e-85e1-546d78192cc1-util\") pod \"3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk\" (UID: \"de93e49c-6575-481e-85e1-546d78192cc1\") " pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk" Feb 02 22:46:14 crc kubenswrapper[4755]: I0202 22:46:14.466159 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/de93e49c-6575-481e-85e1-546d78192cc1-bundle\") pod \"3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk\" (UID: \"de93e49c-6575-481e-85e1-546d78192cc1\") " pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk" Feb 02 22:46:14 crc kubenswrapper[4755]: I0202 22:46:14.466195 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qp8zb\" (UniqueName: \"kubernetes.io/projected/de93e49c-6575-481e-85e1-546d78192cc1-kube-api-access-qp8zb\") pod \"3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk\" (UID: \"de93e49c-6575-481e-85e1-546d78192cc1\") " pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk" Feb 02 22:46:14 crc kubenswrapper[4755]: I0202 22:46:14.466840 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/de93e49c-6575-481e-85e1-546d78192cc1-util\") pod \"3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk\" (UID: \"de93e49c-6575-481e-85e1-546d78192cc1\") " pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk" Feb 02 22:46:14 crc kubenswrapper[4755]: I0202 22:46:14.466981 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/de93e49c-6575-481e-85e1-546d78192cc1-bundle\") pod \"3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk\" (UID: \"de93e49c-6575-481e-85e1-546d78192cc1\") " pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk" Feb 02 22:46:14 crc kubenswrapper[4755]: I0202 22:46:14.487900 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qp8zb\" (UniqueName: \"kubernetes.io/projected/de93e49c-6575-481e-85e1-546d78192cc1-kube-api-access-qp8zb\") pod \"3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk\" (UID: \"de93e49c-6575-481e-85e1-546d78192cc1\") " pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk" Feb 02 22:46:14 crc kubenswrapper[4755]: I0202 22:46:14.553073 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk" Feb 02 22:46:15 crc kubenswrapper[4755]: I0202 22:46:15.079506 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk"] Feb 02 22:46:15 crc kubenswrapper[4755]: I0202 22:46:15.818086 4755 generic.go:334] "Generic (PLEG): container finished" podID="de93e49c-6575-481e-85e1-546d78192cc1" containerID="e90292ad7861f5ee1f2782cfe76a0b84f6fd9f423385c7cdcd5524e886b0831c" exitCode=0 Feb 02 22:46:15 crc kubenswrapper[4755]: I0202 22:46:15.818156 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk" event={"ID":"de93e49c-6575-481e-85e1-546d78192cc1","Type":"ContainerDied","Data":"e90292ad7861f5ee1f2782cfe76a0b84f6fd9f423385c7cdcd5524e886b0831c"} Feb 02 22:46:15 crc kubenswrapper[4755]: I0202 22:46:15.818481 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk" event={"ID":"de93e49c-6575-481e-85e1-546d78192cc1","Type":"ContainerStarted","Data":"69a71b0e9a1243641e4c33acb53d259baab5b2e559fbeae0784ed589113c1bf2"} Feb 02 22:46:17 crc kubenswrapper[4755]: I0202 22:46:17.055760 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["minio-dev/minio"] Feb 02 22:46:17 crc kubenswrapper[4755]: I0202 22:46:17.057256 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="minio-dev/minio" Feb 02 22:46:17 crc kubenswrapper[4755]: I0202 22:46:17.060382 4755 reflector.go:368] Caches populated for *v1.Secret from object-"minio-dev"/"default-dockercfg-l5zmn" Feb 02 22:46:17 crc kubenswrapper[4755]: I0202 22:46:17.060505 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"kube-root-ca.crt" Feb 02 22:46:17 crc kubenswrapper[4755]: I0202 22:46:17.060671 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"openshift-service-ca.crt" Feb 02 22:46:17 crc kubenswrapper[4755]: I0202 22:46:17.079129 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Feb 02 22:46:17 crc kubenswrapper[4755]: I0202 22:46:17.205512 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mp849\" (UniqueName: \"kubernetes.io/projected/8ba9afc3-e82e-4fea-8f4d-5784906803b4-kube-api-access-mp849\") pod \"minio\" (UID: \"8ba9afc3-e82e-4fea-8f4d-5784906803b4\") " pod="minio-dev/minio" Feb 02 22:46:17 crc kubenswrapper[4755]: I0202 22:46:17.205855 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-61d1352c-08f4-4ed0-a007-6ea214c51b53\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-61d1352c-08f4-4ed0-a007-6ea214c51b53\") pod \"minio\" (UID: \"8ba9afc3-e82e-4fea-8f4d-5784906803b4\") " pod="minio-dev/minio" Feb 02 22:46:17 crc kubenswrapper[4755]: I0202 22:46:17.306602 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mp849\" (UniqueName: \"kubernetes.io/projected/8ba9afc3-e82e-4fea-8f4d-5784906803b4-kube-api-access-mp849\") pod \"minio\" (UID: \"8ba9afc3-e82e-4fea-8f4d-5784906803b4\") " pod="minio-dev/minio" Feb 02 22:46:17 crc kubenswrapper[4755]: I0202 22:46:17.306654 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-61d1352c-08f4-4ed0-a007-6ea214c51b53\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-61d1352c-08f4-4ed0-a007-6ea214c51b53\") pod \"minio\" (UID: \"8ba9afc3-e82e-4fea-8f4d-5784906803b4\") " pod="minio-dev/minio" Feb 02 22:46:17 crc kubenswrapper[4755]: I0202 22:46:17.309283 4755 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 02 22:46:17 crc kubenswrapper[4755]: I0202 22:46:17.309314 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-61d1352c-08f4-4ed0-a007-6ea214c51b53\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-61d1352c-08f4-4ed0-a007-6ea214c51b53\") pod \"minio\" (UID: \"8ba9afc3-e82e-4fea-8f4d-5784906803b4\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2d10e2a5c00e8552eba7cd109d9ca4463861e862fcc84764be8b3c591155ace9/globalmount\"" pod="minio-dev/minio" Feb 02 22:46:17 crc kubenswrapper[4755]: I0202 22:46:17.331560 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mp849\" (UniqueName: \"kubernetes.io/projected/8ba9afc3-e82e-4fea-8f4d-5784906803b4-kube-api-access-mp849\") pod \"minio\" (UID: \"8ba9afc3-e82e-4fea-8f4d-5784906803b4\") " pod="minio-dev/minio" Feb 02 22:46:17 crc kubenswrapper[4755]: I0202 22:46:17.346143 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-61d1352c-08f4-4ed0-a007-6ea214c51b53\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-61d1352c-08f4-4ed0-a007-6ea214c51b53\") pod \"minio\" (UID: \"8ba9afc3-e82e-4fea-8f4d-5784906803b4\") " pod="minio-dev/minio" Feb 02 22:46:17 crc kubenswrapper[4755]: I0202 22:46:17.374416 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio" Feb 02 22:46:17 crc kubenswrapper[4755]: I0202 22:46:17.801450 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Feb 02 22:46:17 crc kubenswrapper[4755]: W0202 22:46:17.808364 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ba9afc3_e82e_4fea_8f4d_5784906803b4.slice/crio-8587ced2fcc28d5ba63fa5c31d625a42e2bb729a25fbb2f30db0b7f5b2bf9ffd WatchSource:0}: Error finding container 8587ced2fcc28d5ba63fa5c31d625a42e2bb729a25fbb2f30db0b7f5b2bf9ffd: Status 404 returned error can't find the container with id 8587ced2fcc28d5ba63fa5c31d625a42e2bb729a25fbb2f30db0b7f5b2bf9ffd Feb 02 22:46:17 crc kubenswrapper[4755]: I0202 22:46:17.837596 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"8ba9afc3-e82e-4fea-8f4d-5784906803b4","Type":"ContainerStarted","Data":"8587ced2fcc28d5ba63fa5c31d625a42e2bb729a25fbb2f30db0b7f5b2bf9ffd"} Feb 02 22:46:17 crc kubenswrapper[4755]: I0202 22:46:17.840758 4755 generic.go:334] "Generic (PLEG): container finished" podID="de93e49c-6575-481e-85e1-546d78192cc1" containerID="b7cde4149400fb5a9d1c14d0c143e864cffda3d7a9387eac739f928a0fa0b51f" exitCode=0 Feb 02 22:46:17 crc kubenswrapper[4755]: I0202 22:46:17.840817 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk" event={"ID":"de93e49c-6575-481e-85e1-546d78192cc1","Type":"ContainerDied","Data":"b7cde4149400fb5a9d1c14d0c143e864cffda3d7a9387eac739f928a0fa0b51f"} Feb 02 22:46:18 crc kubenswrapper[4755]: I0202 22:46:18.855274 4755 generic.go:334] "Generic (PLEG): container finished" podID="de93e49c-6575-481e-85e1-546d78192cc1" containerID="2b528a258484b21a7032d81528f9a6b37b32e7185d4edd4ffa77bc90bae9d18c" exitCode=0 Feb 02 22:46:18 crc kubenswrapper[4755]: I0202 22:46:18.855453 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk" 
event={"ID":"de93e49c-6575-481e-85e1-546d78192cc1","Type":"ContainerDied","Data":"2b528a258484b21a7032d81528f9a6b37b32e7185d4edd4ffa77bc90bae9d18c"} Feb 02 22:46:20 crc kubenswrapper[4755]: I0202 22:46:20.592009 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk" Feb 02 22:46:20 crc kubenswrapper[4755]: I0202 22:46:20.687319 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/de93e49c-6575-481e-85e1-546d78192cc1-util\") pod \"de93e49c-6575-481e-85e1-546d78192cc1\" (UID: \"de93e49c-6575-481e-85e1-546d78192cc1\") " Feb 02 22:46:20 crc kubenswrapper[4755]: I0202 22:46:20.687371 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/de93e49c-6575-481e-85e1-546d78192cc1-bundle\") pod \"de93e49c-6575-481e-85e1-546d78192cc1\" (UID: \"de93e49c-6575-481e-85e1-546d78192cc1\") " Feb 02 22:46:20 crc kubenswrapper[4755]: I0202 22:46:20.687428 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qp8zb\" (UniqueName: \"kubernetes.io/projected/de93e49c-6575-481e-85e1-546d78192cc1-kube-api-access-qp8zb\") pod \"de93e49c-6575-481e-85e1-546d78192cc1\" (UID: \"de93e49c-6575-481e-85e1-546d78192cc1\") " Feb 02 22:46:20 crc kubenswrapper[4755]: I0202 22:46:20.688195 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de93e49c-6575-481e-85e1-546d78192cc1-bundle" (OuterVolumeSpecName: "bundle") pod "de93e49c-6575-481e-85e1-546d78192cc1" (UID: "de93e49c-6575-481e-85e1-546d78192cc1"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:46:20 crc kubenswrapper[4755]: I0202 22:46:20.695598 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de93e49c-6575-481e-85e1-546d78192cc1-kube-api-access-qp8zb" (OuterVolumeSpecName: "kube-api-access-qp8zb") pod "de93e49c-6575-481e-85e1-546d78192cc1" (UID: "de93e49c-6575-481e-85e1-546d78192cc1"). InnerVolumeSpecName "kube-api-access-qp8zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:46:20 crc kubenswrapper[4755]: I0202 22:46:20.789159 4755 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/de93e49c-6575-481e-85e1-546d78192cc1-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:46:20 crc kubenswrapper[4755]: I0202 22:46:20.789208 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qp8zb\" (UniqueName: \"kubernetes.io/projected/de93e49c-6575-481e-85e1-546d78192cc1-kube-api-access-qp8zb\") on node \"crc\" DevicePath \"\"" Feb 02 22:46:20 crc kubenswrapper[4755]: I0202 22:46:20.870478 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk" event={"ID":"de93e49c-6575-481e-85e1-546d78192cc1","Type":"ContainerDied","Data":"69a71b0e9a1243641e4c33acb53d259baab5b2e559fbeae0784ed589113c1bf2"} Feb 02 22:46:20 crc kubenswrapper[4755]: I0202 22:46:20.870523 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69a71b0e9a1243641e4c33acb53d259baab5b2e559fbeae0784ed589113c1bf2" Feb 02 22:46:20 crc kubenswrapper[4755]: I0202 22:46:20.870814 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk" Feb 02 22:46:20 crc kubenswrapper[4755]: I0202 22:46:20.979845 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de93e49c-6575-481e-85e1-546d78192cc1-util" (OuterVolumeSpecName: "util") pod "de93e49c-6575-481e-85e1-546d78192cc1" (UID: "de93e49c-6575-481e-85e1-546d78192cc1"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:46:20 crc kubenswrapper[4755]: I0202 22:46:20.991586 4755 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/de93e49c-6575-481e-85e1-546d78192cc1-util\") on node \"crc\" DevicePath \"\"" Feb 02 22:46:21 crc kubenswrapper[4755]: I0202 22:46:21.879585 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"8ba9afc3-e82e-4fea-8f4d-5784906803b4","Type":"ContainerStarted","Data":"38c9721fea4cc64163d3abd799282c213b38e8c41fa3bb89b478bfbbd7bf2b01"} Feb 02 22:46:21 crc kubenswrapper[4755]: I0202 22:46:21.902050 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="minio-dev/minio" podStartSLOduration=4.838378091 podStartE2EDuration="7.902032344s" podCreationTimestamp="2026-02-02 22:46:14 +0000 UTC" firstStartedPulling="2026-02-02 22:46:17.810482455 +0000 UTC m=+733.501702811" lastFinishedPulling="2026-02-02 22:46:20.874136748 +0000 UTC m=+736.565357064" observedRunningTime="2026-02-02 22:46:21.897026003 +0000 UTC m=+737.588246419" watchObservedRunningTime="2026-02-02 22:46:21.902032344 +0000 UTC m=+737.593252670" Feb 02 22:46:23 crc kubenswrapper[4755]: I0202 22:46:23.389565 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 22:46:23 crc kubenswrapper[4755]: I0202 22:46:23.389634 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.243533 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5"] Feb 02 22:46:28 crc kubenswrapper[4755]: E0202 22:46:28.244542 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de93e49c-6575-481e-85e1-546d78192cc1" containerName="extract" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.244559 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="de93e49c-6575-481e-85e1-546d78192cc1" containerName="extract" Feb 02 22:46:28 crc kubenswrapper[4755]: E0202 22:46:28.244569 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de93e49c-6575-481e-85e1-546d78192cc1" containerName="pull" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.244577 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="de93e49c-6575-481e-85e1-546d78192cc1" containerName="pull" Feb 02 22:46:28 crc kubenswrapper[4755]: E0202 22:46:28.244601 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de93e49c-6575-481e-85e1-546d78192cc1" 
containerName="util" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.244610 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="de93e49c-6575-481e-85e1-546d78192cc1" containerName="util" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.245070 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="de93e49c-6575-481e-85e1-546d78192cc1" containerName="extract" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.246154 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.252570 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5"] Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.253305 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"loki-operator-manager-config" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.253457 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"kube-root-ca.crt" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.253533 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-dockercfg-cv6rm" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.253798 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"openshift-service-ca.crt" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.254346 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-metrics" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.258034 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-service-cert" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.401981 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/290fd246-5e83-433b-8aea-9ae358ae4377-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-f7c7b88ff-8vtw5\" (UID: \"290fd246-5e83-433b-8aea-9ae358ae4377\") " pod="openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.402046 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dnr4\" (UniqueName: \"kubernetes.io/projected/290fd246-5e83-433b-8aea-9ae358ae4377-kube-api-access-5dnr4\") pod \"loki-operator-controller-manager-f7c7b88ff-8vtw5\" (UID: \"290fd246-5e83-433b-8aea-9ae358ae4377\") " pod="openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.402289 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/290fd246-5e83-433b-8aea-9ae358ae4377-webhook-cert\") pod \"loki-operator-controller-manager-f7c7b88ff-8vtw5\" (UID: \"290fd246-5e83-433b-8aea-9ae358ae4377\") " pod="openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.402361 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/290fd246-5e83-433b-8aea-9ae358ae4377-manager-config\") pod \"loki-operator-controller-manager-f7c7b88ff-8vtw5\" (UID: \"290fd246-5e83-433b-8aea-9ae358ae4377\") " pod="openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.402390 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/290fd246-5e83-433b-8aea-9ae358ae4377-apiservice-cert\") pod \"loki-operator-controller-manager-f7c7b88ff-8vtw5\" (UID: \"290fd246-5e83-433b-8aea-9ae358ae4377\") " pod="openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.504013 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/290fd246-5e83-433b-8aea-9ae358ae4377-apiservice-cert\") pod \"loki-operator-controller-manager-f7c7b88ff-8vtw5\" (UID: \"290fd246-5e83-433b-8aea-9ae358ae4377\") " pod="openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.504069 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/290fd246-5e83-433b-8aea-9ae358ae4377-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-f7c7b88ff-8vtw5\" (UID: \"290fd246-5e83-433b-8aea-9ae358ae4377\") " pod="openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.504106 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dnr4\" (UniqueName: \"kubernetes.io/projected/290fd246-5e83-433b-8aea-9ae358ae4377-kube-api-access-5dnr4\") pod \"loki-operator-controller-manager-f7c7b88ff-8vtw5\" (UID: \"290fd246-5e83-433b-8aea-9ae358ae4377\") " pod="openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.504198 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/290fd246-5e83-433b-8aea-9ae358ae4377-webhook-cert\") pod \"loki-operator-controller-manager-f7c7b88ff-8vtw5\" (UID: \"290fd246-5e83-433b-8aea-9ae358ae4377\") " pod="openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.504226 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/290fd246-5e83-433b-8aea-9ae358ae4377-manager-config\") pod \"loki-operator-controller-manager-f7c7b88ff-8vtw5\" (UID: \"290fd246-5e83-433b-8aea-9ae358ae4377\") " pod="openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.505165 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/290fd246-5e83-433b-8aea-9ae358ae4377-manager-config\") pod \"loki-operator-controller-manager-f7c7b88ff-8vtw5\" (UID: \"290fd246-5e83-433b-8aea-9ae358ae4377\") " pod="openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5" Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.510198 4755 
Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.510225 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/290fd246-5e83-433b-8aea-9ae358ae4377-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-f7c7b88ff-8vtw5\" (UID: \"290fd246-5e83-433b-8aea-9ae358ae4377\") " pod="openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5"
Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.516403 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/290fd246-5e83-433b-8aea-9ae358ae4377-webhook-cert\") pod \"loki-operator-controller-manager-f7c7b88ff-8vtw5\" (UID: \"290fd246-5e83-433b-8aea-9ae358ae4377\") " pod="openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5"
Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.521512 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dnr4\" (UniqueName: \"kubernetes.io/projected/290fd246-5e83-433b-8aea-9ae358ae4377-kube-api-access-5dnr4\") pod \"loki-operator-controller-manager-f7c7b88ff-8vtw5\" (UID: \"290fd246-5e83-433b-8aea-9ae358ae4377\") " pod="openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5"
Feb 02 22:46:28 crc kubenswrapper[4755]: I0202 22:46:28.565963 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5"
Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5" Feb 02 22:46:29 crc kubenswrapper[4755]: I0202 22:46:29.057103 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5"] Feb 02 22:46:29 crc kubenswrapper[4755]: W0202 22:46:29.064922 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod290fd246_5e83_433b_8aea_9ae358ae4377.slice/crio-f1bf8f546e378ff8da05ef52844e8f38859cc514c0b7a68c6e90e16b68949c50 WatchSource:0}: Error finding container f1bf8f546e378ff8da05ef52844e8f38859cc514c0b7a68c6e90e16b68949c50: Status 404 returned error can't find the container with id f1bf8f546e378ff8da05ef52844e8f38859cc514c0b7a68c6e90e16b68949c50 Feb 02 22:46:29 crc kubenswrapper[4755]: I0202 22:46:29.929762 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5" event={"ID":"290fd246-5e83-433b-8aea-9ae358ae4377","Type":"ContainerStarted","Data":"f1bf8f546e378ff8da05ef52844e8f38859cc514c0b7a68c6e90e16b68949c50"} Feb 02 22:46:33 crc kubenswrapper[4755]: I0202 22:46:33.953936 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5" event={"ID":"290fd246-5e83-433b-8aea-9ae358ae4377","Type":"ContainerStarted","Data":"49418c59df6e4ee516feafa7cb9553cfb182d7021836d5d10dbc1ed549a5ef7f"} Feb 02 22:46:39 crc kubenswrapper[4755]: I0202 22:46:39.986945 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5" event={"ID":"290fd246-5e83-433b-8aea-9ae358ae4377","Type":"ContainerStarted","Data":"fdaf4761cdc27366795380569a61e98f5d525277b8cb8a02511222d109dc535e"} Feb 02 22:46:39 crc kubenswrapper[4755]: I0202 22:46:39.987551 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5" Feb 02 22:46:39 crc kubenswrapper[4755]: I0202 22:46:39.992317 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5" Feb 02 22:46:40 crc kubenswrapper[4755]: I0202 22:46:40.024186 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators-redhat/loki-operator-controller-manager-f7c7b88ff-8vtw5" podStartSLOduration=1.233208562 podStartE2EDuration="12.024171823s" podCreationTimestamp="2026-02-02 22:46:28 +0000 UTC" firstStartedPulling="2026-02-02 22:46:29.067425953 +0000 UTC m=+744.758646299" lastFinishedPulling="2026-02-02 22:46:39.858389234 +0000 UTC m=+755.549609560" observedRunningTime="2026-02-02 22:46:40.017602878 +0000 UTC m=+755.708823214" watchObservedRunningTime="2026-02-02 22:46:40.024171823 +0000 UTC m=+755.715392149" Feb 02 22:46:49 crc kubenswrapper[4755]: I0202 22:46:49.096028 4755 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Feb 02 22:46:53 crc kubenswrapper[4755]: I0202 22:46:53.389946 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 
Feb 02 22:46:53 crc kubenswrapper[4755]: I0202 22:46:53.390318 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc"
Feb 02 22:46:53 crc kubenswrapper[4755]: I0202 22:46:53.390892 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bb11c6f11cba368ea19d2ffe2c3481fe5fb952a4be61f80011767257620e0091"} pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 02 22:46:53 crc kubenswrapper[4755]: I0202 22:46:53.390962 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" containerID="cri-o://bb11c6f11cba368ea19d2ffe2c3481fe5fb952a4be61f80011767257620e0091" gracePeriod=600
Feb 02 22:46:54 crc kubenswrapper[4755]: I0202 22:46:54.082707 4755 generic.go:334] "Generic (PLEG): container finished" podID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerID="bb11c6f11cba368ea19d2ffe2c3481fe5fb952a4be61f80011767257620e0091" exitCode=0
Feb 02 22:46:54 crc kubenswrapper[4755]: I0202 22:46:54.082794 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerDied","Data":"bb11c6f11cba368ea19d2ffe2c3481fe5fb952a4be61f80011767257620e0091"}
Feb 02 22:46:54 crc kubenswrapper[4755]: I0202 22:46:54.083146 4755 scope.go:117] "RemoveContainer" containerID="56c4714b35140ee6353c0afeb63cfebceaadf5fa9fb114929c1b4447c72f0448"
Feb 02 22:46:55 crc kubenswrapper[4755]: I0202 22:46:55.093912 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerStarted","Data":"b3e3969df739edd98047f8857204b723c7cae6ce3d65529d90b43e5d926f70bf"}
Feb 02 22:47:13 crc kubenswrapper[4755]: I0202 22:47:13.261197 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc"]
Feb 02 22:47:13 crc kubenswrapper[4755]: I0202 22:47:13.264714 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc"
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc" Feb 02 22:47:13 crc kubenswrapper[4755]: I0202 22:47:13.268496 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Feb 02 22:47:13 crc kubenswrapper[4755]: I0202 22:47:13.287512 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc"] Feb 02 22:47:13 crc kubenswrapper[4755]: I0202 22:47:13.353577 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f0e16a35-fd76-4a88-93c2-7011b557e703-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc\" (UID: \"f0e16a35-fd76-4a88-93c2-7011b557e703\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc" Feb 02 22:47:13 crc kubenswrapper[4755]: I0202 22:47:13.353620 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f0e16a35-fd76-4a88-93c2-7011b557e703-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc\" (UID: \"f0e16a35-fd76-4a88-93c2-7011b557e703\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc" Feb 02 22:47:13 crc kubenswrapper[4755]: I0202 22:47:13.353982 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnwl2\" (UniqueName: \"kubernetes.io/projected/f0e16a35-fd76-4a88-93c2-7011b557e703-kube-api-access-qnwl2\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc\" (UID: \"f0e16a35-fd76-4a88-93c2-7011b557e703\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc" Feb 02 22:47:13 crc kubenswrapper[4755]: I0202 22:47:13.455380 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnwl2\" (UniqueName: \"kubernetes.io/projected/f0e16a35-fd76-4a88-93c2-7011b557e703-kube-api-access-qnwl2\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc\" (UID: \"f0e16a35-fd76-4a88-93c2-7011b557e703\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc" Feb 02 22:47:13 crc kubenswrapper[4755]: I0202 22:47:13.455502 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f0e16a35-fd76-4a88-93c2-7011b557e703-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc\" (UID: \"f0e16a35-fd76-4a88-93c2-7011b557e703\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc" Feb 02 22:47:13 crc kubenswrapper[4755]: I0202 22:47:13.455540 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f0e16a35-fd76-4a88-93c2-7011b557e703-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc\" (UID: \"f0e16a35-fd76-4a88-93c2-7011b557e703\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc" Feb 02 22:47:13 crc kubenswrapper[4755]: I0202 22:47:13.456227 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/f0e16a35-fd76-4a88-93c2-7011b557e703-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc\" (UID: \"f0e16a35-fd76-4a88-93c2-7011b557e703\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc" Feb 02 22:47:13 crc kubenswrapper[4755]: I0202 22:47:13.456273 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f0e16a35-fd76-4a88-93c2-7011b557e703-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc\" (UID: \"f0e16a35-fd76-4a88-93c2-7011b557e703\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc" Feb 02 22:47:13 crc kubenswrapper[4755]: I0202 22:47:13.485619 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnwl2\" (UniqueName: \"kubernetes.io/projected/f0e16a35-fd76-4a88-93c2-7011b557e703-kube-api-access-qnwl2\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc\" (UID: \"f0e16a35-fd76-4a88-93c2-7011b557e703\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc" Feb 02 22:47:13 crc kubenswrapper[4755]: I0202 22:47:13.595052 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc" Feb 02 22:47:13 crc kubenswrapper[4755]: I0202 22:47:13.912904 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc"] Feb 02 22:47:14 crc kubenswrapper[4755]: I0202 22:47:14.225542 4755 generic.go:334] "Generic (PLEG): container finished" podID="f0e16a35-fd76-4a88-93c2-7011b557e703" containerID="5c5fb19b322cb9b4ca86438667b98cb3950734b332612a5e1a7d677e092c1aca" exitCode=0 Feb 02 22:47:14 crc kubenswrapper[4755]: I0202 22:47:14.225595 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc" event={"ID":"f0e16a35-fd76-4a88-93c2-7011b557e703","Type":"ContainerDied","Data":"5c5fb19b322cb9b4ca86438667b98cb3950734b332612a5e1a7d677e092c1aca"} Feb 02 22:47:14 crc kubenswrapper[4755]: I0202 22:47:14.225639 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc" event={"ID":"f0e16a35-fd76-4a88-93c2-7011b557e703","Type":"ContainerStarted","Data":"af0403b4ed51c68668592722b30cb8db87824e85726bfcc0884bfb715a2aefde"} Feb 02 22:47:15 crc kubenswrapper[4755]: I0202 22:47:15.629683 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zbfzw"] Feb 02 22:47:15 crc kubenswrapper[4755]: I0202 22:47:15.632656 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zbfzw" Feb 02 22:47:15 crc kubenswrapper[4755]: I0202 22:47:15.653998 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zbfzw"] Feb 02 22:47:15 crc kubenswrapper[4755]: I0202 22:47:15.683791 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51defda8-7ef7-454f-be74-fbd8596bb91c-catalog-content\") pod \"redhat-operators-zbfzw\" (UID: \"51defda8-7ef7-454f-be74-fbd8596bb91c\") " pod="openshift-marketplace/redhat-operators-zbfzw" Feb 02 22:47:15 crc kubenswrapper[4755]: I0202 22:47:15.683891 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51defda8-7ef7-454f-be74-fbd8596bb91c-utilities\") pod \"redhat-operators-zbfzw\" (UID: \"51defda8-7ef7-454f-be74-fbd8596bb91c\") " pod="openshift-marketplace/redhat-operators-zbfzw" Feb 02 22:47:15 crc kubenswrapper[4755]: I0202 22:47:15.683940 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8jxs\" (UniqueName: \"kubernetes.io/projected/51defda8-7ef7-454f-be74-fbd8596bb91c-kube-api-access-x8jxs\") pod \"redhat-operators-zbfzw\" (UID: \"51defda8-7ef7-454f-be74-fbd8596bb91c\") " pod="openshift-marketplace/redhat-operators-zbfzw" Feb 02 22:47:15 crc kubenswrapper[4755]: I0202 22:47:15.785066 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51defda8-7ef7-454f-be74-fbd8596bb91c-utilities\") pod \"redhat-operators-zbfzw\" (UID: \"51defda8-7ef7-454f-be74-fbd8596bb91c\") " pod="openshift-marketplace/redhat-operators-zbfzw" Feb 02 22:47:15 crc kubenswrapper[4755]: I0202 22:47:15.785138 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8jxs\" (UniqueName: \"kubernetes.io/projected/51defda8-7ef7-454f-be74-fbd8596bb91c-kube-api-access-x8jxs\") pod \"redhat-operators-zbfzw\" (UID: \"51defda8-7ef7-454f-be74-fbd8596bb91c\") " pod="openshift-marketplace/redhat-operators-zbfzw" Feb 02 22:47:15 crc kubenswrapper[4755]: I0202 22:47:15.785190 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51defda8-7ef7-454f-be74-fbd8596bb91c-catalog-content\") pod \"redhat-operators-zbfzw\" (UID: \"51defda8-7ef7-454f-be74-fbd8596bb91c\") " pod="openshift-marketplace/redhat-operators-zbfzw" Feb 02 22:47:15 crc kubenswrapper[4755]: I0202 22:47:15.785719 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51defda8-7ef7-454f-be74-fbd8596bb91c-catalog-content\") pod \"redhat-operators-zbfzw\" (UID: \"51defda8-7ef7-454f-be74-fbd8596bb91c\") " pod="openshift-marketplace/redhat-operators-zbfzw" Feb 02 22:47:15 crc kubenswrapper[4755]: I0202 22:47:15.786036 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51defda8-7ef7-454f-be74-fbd8596bb91c-utilities\") pod \"redhat-operators-zbfzw\" (UID: \"51defda8-7ef7-454f-be74-fbd8596bb91c\") " pod="openshift-marketplace/redhat-operators-zbfzw" Feb 02 22:47:15 crc kubenswrapper[4755]: I0202 22:47:15.806894 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-x8jxs\" (UniqueName: \"kubernetes.io/projected/51defda8-7ef7-454f-be74-fbd8596bb91c-kube-api-access-x8jxs\") pod \"redhat-operators-zbfzw\" (UID: \"51defda8-7ef7-454f-be74-fbd8596bb91c\") " pod="openshift-marketplace/redhat-operators-zbfzw" Feb 02 22:47:15 crc kubenswrapper[4755]: I0202 22:47:15.978205 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zbfzw" Feb 02 22:47:16 crc kubenswrapper[4755]: I0202 22:47:16.238852 4755 generic.go:334] "Generic (PLEG): container finished" podID="f0e16a35-fd76-4a88-93c2-7011b557e703" containerID="d97d11c0a24cea960190d3ec25f705f5e6faa244360c3c1a96f0e0c7ac4d323b" exitCode=0 Feb 02 22:47:16 crc kubenswrapper[4755]: I0202 22:47:16.239103 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc" event={"ID":"f0e16a35-fd76-4a88-93c2-7011b557e703","Type":"ContainerDied","Data":"d97d11c0a24cea960190d3ec25f705f5e6faa244360c3c1a96f0e0c7ac4d323b"} Feb 02 22:47:16 crc kubenswrapper[4755]: W0202 22:47:16.400160 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51defda8_7ef7_454f_be74_fbd8596bb91c.slice/crio-2e92f50bcd4497cde11ee586c23b7d3088e10cca90437697ec63687fc798e5e8 WatchSource:0}: Error finding container 2e92f50bcd4497cde11ee586c23b7d3088e10cca90437697ec63687fc798e5e8: Status 404 returned error can't find the container with id 2e92f50bcd4497cde11ee586c23b7d3088e10cca90437697ec63687fc798e5e8 Feb 02 22:47:16 crc kubenswrapper[4755]: I0202 22:47:16.401839 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zbfzw"] Feb 02 22:47:17 crc kubenswrapper[4755]: I0202 22:47:17.246607 4755 generic.go:334] "Generic (PLEG): container finished" podID="51defda8-7ef7-454f-be74-fbd8596bb91c" containerID="a269de5b07166ac0557aab125422f6918139c3c09128af0cfbabde823d6cb5ae" exitCode=0 Feb 02 22:47:17 crc kubenswrapper[4755]: I0202 22:47:17.246682 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zbfzw" event={"ID":"51defda8-7ef7-454f-be74-fbd8596bb91c","Type":"ContainerDied","Data":"a269de5b07166ac0557aab125422f6918139c3c09128af0cfbabde823d6cb5ae"} Feb 02 22:47:17 crc kubenswrapper[4755]: I0202 22:47:17.246714 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zbfzw" event={"ID":"51defda8-7ef7-454f-be74-fbd8596bb91c","Type":"ContainerStarted","Data":"2e92f50bcd4497cde11ee586c23b7d3088e10cca90437697ec63687fc798e5e8"} Feb 02 22:47:17 crc kubenswrapper[4755]: I0202 22:47:17.248850 4755 generic.go:334] "Generic (PLEG): container finished" podID="f0e16a35-fd76-4a88-93c2-7011b557e703" containerID="3653777ee9d66640286e6ea80c69d966b2963c207015cc776e51cdfe1d282394" exitCode=0 Feb 02 22:47:17 crc kubenswrapper[4755]: I0202 22:47:17.248920 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc" event={"ID":"f0e16a35-fd76-4a88-93c2-7011b557e703","Type":"ContainerDied","Data":"3653777ee9d66640286e6ea80c69d966b2963c207015cc776e51cdfe1d282394"} Feb 02 22:47:18 crc kubenswrapper[4755]: I0202 22:47:18.261403 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zbfzw" 
event={"ID":"51defda8-7ef7-454f-be74-fbd8596bb91c","Type":"ContainerStarted","Data":"896100609d854c5a1570416e1b5a3baf3976bdc450dabeb1a138c0320c15b60e"} Feb 02 22:47:18 crc kubenswrapper[4755]: I0202 22:47:18.656678 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc" Feb 02 22:47:18 crc kubenswrapper[4755]: I0202 22:47:18.719571 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f0e16a35-fd76-4a88-93c2-7011b557e703-bundle\") pod \"f0e16a35-fd76-4a88-93c2-7011b557e703\" (UID: \"f0e16a35-fd76-4a88-93c2-7011b557e703\") " Feb 02 22:47:18 crc kubenswrapper[4755]: I0202 22:47:18.719642 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnwl2\" (UniqueName: \"kubernetes.io/projected/f0e16a35-fd76-4a88-93c2-7011b557e703-kube-api-access-qnwl2\") pod \"f0e16a35-fd76-4a88-93c2-7011b557e703\" (UID: \"f0e16a35-fd76-4a88-93c2-7011b557e703\") " Feb 02 22:47:18 crc kubenswrapper[4755]: I0202 22:47:18.720180 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0e16a35-fd76-4a88-93c2-7011b557e703-bundle" (OuterVolumeSpecName: "bundle") pod "f0e16a35-fd76-4a88-93c2-7011b557e703" (UID: "f0e16a35-fd76-4a88-93c2-7011b557e703"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:47:18 crc kubenswrapper[4755]: I0202 22:47:18.720431 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f0e16a35-fd76-4a88-93c2-7011b557e703-util\") pod \"f0e16a35-fd76-4a88-93c2-7011b557e703\" (UID: \"f0e16a35-fd76-4a88-93c2-7011b557e703\") " Feb 02 22:47:18 crc kubenswrapper[4755]: I0202 22:47:18.720616 4755 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f0e16a35-fd76-4a88-93c2-7011b557e703-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:47:18 crc kubenswrapper[4755]: I0202 22:47:18.724145 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0e16a35-fd76-4a88-93c2-7011b557e703-kube-api-access-qnwl2" (OuterVolumeSpecName: "kube-api-access-qnwl2") pod "f0e16a35-fd76-4a88-93c2-7011b557e703" (UID: "f0e16a35-fd76-4a88-93c2-7011b557e703"). InnerVolumeSpecName "kube-api-access-qnwl2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:47:18 crc kubenswrapper[4755]: I0202 22:47:18.739191 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0e16a35-fd76-4a88-93c2-7011b557e703-util" (OuterVolumeSpecName: "util") pod "f0e16a35-fd76-4a88-93c2-7011b557e703" (UID: "f0e16a35-fd76-4a88-93c2-7011b557e703"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:47:18 crc kubenswrapper[4755]: I0202 22:47:18.822041 4755 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f0e16a35-fd76-4a88-93c2-7011b557e703-util\") on node \"crc\" DevicePath \"\"" Feb 02 22:47:18 crc kubenswrapper[4755]: I0202 22:47:18.822094 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnwl2\" (UniqueName: \"kubernetes.io/projected/f0e16a35-fd76-4a88-93c2-7011b557e703-kube-api-access-qnwl2\") on node \"crc\" DevicePath \"\"" Feb 02 22:47:19 crc kubenswrapper[4755]: I0202 22:47:19.273350 4755 generic.go:334] "Generic (PLEG): container finished" podID="51defda8-7ef7-454f-be74-fbd8596bb91c" containerID="896100609d854c5a1570416e1b5a3baf3976bdc450dabeb1a138c0320c15b60e" exitCode=0 Feb 02 22:47:19 crc kubenswrapper[4755]: I0202 22:47:19.273441 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zbfzw" event={"ID":"51defda8-7ef7-454f-be74-fbd8596bb91c","Type":"ContainerDied","Data":"896100609d854c5a1570416e1b5a3baf3976bdc450dabeb1a138c0320c15b60e"} Feb 02 22:47:19 crc kubenswrapper[4755]: I0202 22:47:19.282346 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc" event={"ID":"f0e16a35-fd76-4a88-93c2-7011b557e703","Type":"ContainerDied","Data":"af0403b4ed51c68668592722b30cb8db87824e85726bfcc0884bfb715a2aefde"} Feb 02 22:47:19 crc kubenswrapper[4755]: I0202 22:47:19.282419 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="af0403b4ed51c68668592722b30cb8db87824e85726bfcc0884bfb715a2aefde" Feb 02 22:47:19 crc kubenswrapper[4755]: I0202 22:47:19.282529 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc" Feb 02 22:47:20 crc kubenswrapper[4755]: I0202 22:47:20.290273 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zbfzw" event={"ID":"51defda8-7ef7-454f-be74-fbd8596bb91c","Type":"ContainerStarted","Data":"516f6cf6f8e23de6501273a8707e16ae56d3ad7e17916ee5fb33f61a06ca2f93"} Feb 02 22:47:20 crc kubenswrapper[4755]: I0202 22:47:20.305205 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zbfzw" podStartSLOduration=2.8741507520000003 podStartE2EDuration="5.305187547s" podCreationTimestamp="2026-02-02 22:47:15 +0000 UTC" firstStartedPulling="2026-02-02 22:47:17.249066973 +0000 UTC m=+792.940287309" lastFinishedPulling="2026-02-02 22:47:19.680103748 +0000 UTC m=+795.371324104" observedRunningTime="2026-02-02 22:47:20.304892729 +0000 UTC m=+795.996113095" watchObservedRunningTime="2026-02-02 22:47:20.305187547 +0000 UTC m=+795.996407873" Feb 02 22:47:23 crc kubenswrapper[4755]: I0202 22:47:23.143856 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-vsn87"] Feb 02 22:47:23 crc kubenswrapper[4755]: E0202 22:47:23.144095 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0e16a35-fd76-4a88-93c2-7011b557e703" containerName="pull" Feb 02 22:47:23 crc kubenswrapper[4755]: I0202 22:47:23.144109 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0e16a35-fd76-4a88-93c2-7011b557e703" containerName="pull" Feb 02 22:47:23 crc kubenswrapper[4755]: E0202 22:47:23.144119 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0e16a35-fd76-4a88-93c2-7011b557e703" containerName="util" Feb 02 22:47:23 crc kubenswrapper[4755]: I0202 22:47:23.144127 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0e16a35-fd76-4a88-93c2-7011b557e703" containerName="util" Feb 02 22:47:23 crc kubenswrapper[4755]: E0202 22:47:23.144142 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0e16a35-fd76-4a88-93c2-7011b557e703" containerName="extract" Feb 02 22:47:23 crc kubenswrapper[4755]: I0202 22:47:23.144150 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0e16a35-fd76-4a88-93c2-7011b557e703" containerName="extract" Feb 02 22:47:23 crc kubenswrapper[4755]: I0202 22:47:23.144276 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0e16a35-fd76-4a88-93c2-7011b557e703" containerName="extract" Feb 02 22:47:23 crc kubenswrapper[4755]: I0202 22:47:23.144716 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-vsn87" Feb 02 22:47:23 crc kubenswrapper[4755]: I0202 22:47:23.146898 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-89mqc" Feb 02 22:47:23 crc kubenswrapper[4755]: I0202 22:47:23.147181 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Feb 02 22:47:23 crc kubenswrapper[4755]: I0202 22:47:23.149779 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Feb 02 22:47:23 crc kubenswrapper[4755]: I0202 22:47:23.167435 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-vsn87"] Feb 02 22:47:23 crc kubenswrapper[4755]: I0202 22:47:23.176634 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wmjx\" (UniqueName: \"kubernetes.io/projected/930aa2ef-4ef3-4759-9906-9cbafaf06970-kube-api-access-2wmjx\") pod \"nmstate-operator-646758c888-vsn87\" (UID: \"930aa2ef-4ef3-4759-9906-9cbafaf06970\") " pod="openshift-nmstate/nmstate-operator-646758c888-vsn87" Feb 02 22:47:23 crc kubenswrapper[4755]: I0202 22:47:23.278250 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wmjx\" (UniqueName: \"kubernetes.io/projected/930aa2ef-4ef3-4759-9906-9cbafaf06970-kube-api-access-2wmjx\") pod \"nmstate-operator-646758c888-vsn87\" (UID: \"930aa2ef-4ef3-4759-9906-9cbafaf06970\") " pod="openshift-nmstate/nmstate-operator-646758c888-vsn87" Feb 02 22:47:23 crc kubenswrapper[4755]: I0202 22:47:23.304607 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wmjx\" (UniqueName: \"kubernetes.io/projected/930aa2ef-4ef3-4759-9906-9cbafaf06970-kube-api-access-2wmjx\") pod \"nmstate-operator-646758c888-vsn87\" (UID: \"930aa2ef-4ef3-4759-9906-9cbafaf06970\") " pod="openshift-nmstate/nmstate-operator-646758c888-vsn87" Feb 02 22:47:23 crc kubenswrapper[4755]: I0202 22:47:23.467164 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-vsn87" Feb 02 22:47:24 crc kubenswrapper[4755]: I0202 22:47:24.011756 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-vsn87"] Feb 02 22:47:24 crc kubenswrapper[4755]: I0202 22:47:24.312214 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-vsn87" event={"ID":"930aa2ef-4ef3-4759-9906-9cbafaf06970","Type":"ContainerStarted","Data":"6fc4c62e81d96a88276a343512d994c3ee30e2d750337cf5c8668396d54a4c59"} Feb 02 22:47:25 crc kubenswrapper[4755]: I0202 22:47:25.979212 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zbfzw" Feb 02 22:47:25 crc kubenswrapper[4755]: I0202 22:47:25.979773 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zbfzw" Feb 02 22:47:27 crc kubenswrapper[4755]: I0202 22:47:27.022203 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-zbfzw" podUID="51defda8-7ef7-454f-be74-fbd8596bb91c" containerName="registry-server" probeResult="failure" output=< Feb 02 22:47:27 crc kubenswrapper[4755]: timeout: failed to connect service ":50051" within 1s Feb 02 22:47:27 crc kubenswrapper[4755]: > Feb 02 22:47:27 crc kubenswrapper[4755]: I0202 22:47:27.333986 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-vsn87" event={"ID":"930aa2ef-4ef3-4759-9906-9cbafaf06970","Type":"ContainerStarted","Data":"c54157f7379a7ac38d678f2c7c6e7145ff4177066b4d80131b98e60eda3a34e2"} Feb 02 22:47:27 crc kubenswrapper[4755]: I0202 22:47:27.367146 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-646758c888-vsn87" podStartSLOduration=1.854021896 podStartE2EDuration="4.367118965s" podCreationTimestamp="2026-02-02 22:47:23 +0000 UTC" firstStartedPulling="2026-02-02 22:47:24.019424441 +0000 UTC m=+799.710644767" lastFinishedPulling="2026-02-02 22:47:26.53252151 +0000 UTC m=+802.223741836" observedRunningTime="2026-02-02 22:47:27.360534459 +0000 UTC m=+803.051754785" watchObservedRunningTime="2026-02-02 22:47:27.367118965 +0000 UTC m=+803.058339321" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.401226 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-z6gz8"] Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.402288 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-z6gz8" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.404577 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-zgmvn" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.418847 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-z6gz8"] Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.422484 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-xtzkv"] Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.423175 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xtzkv" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.427386 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.435289 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-xtzkv"] Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.450988 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-sj7f5"] Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.452257 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcjmg\" (UniqueName: \"kubernetes.io/projected/329545ba-b43d-4600-bc08-84159813a2e4-kube-api-access-wcjmg\") pod \"nmstate-metrics-54757c584b-z6gz8\" (UID: \"329545ba-b43d-4600-bc08-84159813a2e4\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-z6gz8" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.452303 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k88dd\" (UniqueName: \"kubernetes.io/projected/bea73b39-f5c5-4290-a4f5-c1338552023f-kube-api-access-k88dd\") pod \"nmstate-webhook-8474b5b9d8-xtzkv\" (UID: \"bea73b39-f5c5-4290-a4f5-c1338552023f\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xtzkv" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.452373 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/bea73b39-f5c5-4290-a4f5-c1338552023f-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-xtzkv\" (UID: \"bea73b39-f5c5-4290-a4f5-c1338552023f\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xtzkv" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.455595 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-sj7f5" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.548721 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-ht2qx"] Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.550161 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ht2qx" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.555416 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-xl9nk" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.555576 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.555693 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/bea73b39-f5c5-4290-a4f5-c1338552023f-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-xtzkv\" (UID: \"bea73b39-f5c5-4290-a4f5-c1338552023f\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xtzkv" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.555799 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcjmg\" (UniqueName: \"kubernetes.io/projected/329545ba-b43d-4600-bc08-84159813a2e4-kube-api-access-wcjmg\") pod \"nmstate-metrics-54757c584b-z6gz8\" (UID: \"329545ba-b43d-4600-bc08-84159813a2e4\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-z6gz8" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.555834 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k88dd\" (UniqueName: \"kubernetes.io/projected/bea73b39-f5c5-4290-a4f5-c1338552023f-kube-api-access-k88dd\") pod \"nmstate-webhook-8474b5b9d8-xtzkv\" (UID: \"bea73b39-f5c5-4290-a4f5-c1338552023f\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xtzkv" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.556267 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Feb 02 22:47:28 crc kubenswrapper[4755]: E0202 22:47:28.557532 4755 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Feb 02 22:47:28 crc kubenswrapper[4755]: E0202 22:47:28.557592 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bea73b39-f5c5-4290-a4f5-c1338552023f-tls-key-pair podName:bea73b39-f5c5-4290-a4f5-c1338552023f nodeName:}" failed. No retries permitted until 2026-02-02 22:47:29.05757171 +0000 UTC m=+804.748792036 (durationBeforeRetry 500ms). 
Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.561624 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-ht2qx"]
Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.583169 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k88dd\" (UniqueName: \"kubernetes.io/projected/bea73b39-f5c5-4290-a4f5-c1338552023f-kube-api-access-k88dd\") pod \"nmstate-webhook-8474b5b9d8-xtzkv\" (UID: \"bea73b39-f5c5-4290-a4f5-c1338552023f\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xtzkv"
Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.589935 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcjmg\" (UniqueName: \"kubernetes.io/projected/329545ba-b43d-4600-bc08-84159813a2e4-kube-api-access-wcjmg\") pod \"nmstate-metrics-54757c584b-z6gz8\" (UID: \"329545ba-b43d-4600-bc08-84159813a2e4\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-z6gz8"
Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.656908 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/64c0f9fe-4e93-4135-bd8a-88e659d417d9-dbus-socket\") pod \"nmstate-handler-sj7f5\" (UID: \"64c0f9fe-4e93-4135-bd8a-88e659d417d9\") " pod="openshift-nmstate/nmstate-handler-sj7f5"
Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.656963 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/64c0f9fe-4e93-4135-bd8a-88e659d417d9-nmstate-lock\") pod \"nmstate-handler-sj7f5\" (UID: \"64c0f9fe-4e93-4135-bd8a-88e659d417d9\") " pod="openshift-nmstate/nmstate-handler-sj7f5"
Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.656996 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slq2c\" (UniqueName: \"kubernetes.io/projected/64c0f9fe-4e93-4135-bd8a-88e659d417d9-kube-api-access-slq2c\") pod \"nmstate-handler-sj7f5\" (UID: \"64c0f9fe-4e93-4135-bd8a-88e659d417d9\") " pod="openshift-nmstate/nmstate-handler-sj7f5"
Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.657015 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/64c0f9fe-4e93-4135-bd8a-88e659d417d9-ovs-socket\") pod \"nmstate-handler-sj7f5\" (UID: \"64c0f9fe-4e93-4135-bd8a-88e659d417d9\") " pod="openshift-nmstate/nmstate-handler-sj7f5"
Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.657122 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5e6d05f8-65b8-49af-a844-b8bac61552c2-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-ht2qx\" (UID: \"5e6d05f8-65b8-49af-a844-b8bac61552c2\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ht2qx"
Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.657228 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkxhp\" (UniqueName: \"kubernetes.io/projected/5e6d05f8-65b8-49af-a844-b8bac61552c2-kube-api-access-tkxhp\") pod \"nmstate-console-plugin-7754f76f8b-ht2qx\" (UID: \"5e6d05f8-65b8-49af-a844-b8bac61552c2\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ht2qx"
\"kubernetes.io/projected/5e6d05f8-65b8-49af-a844-b8bac61552c2-kube-api-access-tkxhp\") pod \"nmstate-console-plugin-7754f76f8b-ht2qx\" (UID: \"5e6d05f8-65b8-49af-a844-b8bac61552c2\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ht2qx" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.657378 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/5e6d05f8-65b8-49af-a844-b8bac61552c2-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-ht2qx\" (UID: \"5e6d05f8-65b8-49af-a844-b8bac61552c2\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ht2qx" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.758003 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkxhp\" (UniqueName: \"kubernetes.io/projected/5e6d05f8-65b8-49af-a844-b8bac61552c2-kube-api-access-tkxhp\") pod \"nmstate-console-plugin-7754f76f8b-ht2qx\" (UID: \"5e6d05f8-65b8-49af-a844-b8bac61552c2\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ht2qx" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.758110 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/5e6d05f8-65b8-49af-a844-b8bac61552c2-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-ht2qx\" (UID: \"5e6d05f8-65b8-49af-a844-b8bac61552c2\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ht2qx" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.758132 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/64c0f9fe-4e93-4135-bd8a-88e659d417d9-dbus-socket\") pod \"nmstate-handler-sj7f5\" (UID: \"64c0f9fe-4e93-4135-bd8a-88e659d417d9\") " pod="openshift-nmstate/nmstate-handler-sj7f5" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.758162 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/64c0f9fe-4e93-4135-bd8a-88e659d417d9-nmstate-lock\") pod \"nmstate-handler-sj7f5\" (UID: \"64c0f9fe-4e93-4135-bd8a-88e659d417d9\") " pod="openshift-nmstate/nmstate-handler-sj7f5" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.758195 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slq2c\" (UniqueName: \"kubernetes.io/projected/64c0f9fe-4e93-4135-bd8a-88e659d417d9-kube-api-access-slq2c\") pod \"nmstate-handler-sj7f5\" (UID: \"64c0f9fe-4e93-4135-bd8a-88e659d417d9\") " pod="openshift-nmstate/nmstate-handler-sj7f5" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.758217 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/64c0f9fe-4e93-4135-bd8a-88e659d417d9-ovs-socket\") pod \"nmstate-handler-sj7f5\" (UID: \"64c0f9fe-4e93-4135-bd8a-88e659d417d9\") " pod="openshift-nmstate/nmstate-handler-sj7f5" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.758245 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5e6d05f8-65b8-49af-a844-b8bac61552c2-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-ht2qx\" (UID: \"5e6d05f8-65b8-49af-a844-b8bac61552c2\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ht2qx" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 
22:47:28.758291 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/64c0f9fe-4e93-4135-bd8a-88e659d417d9-nmstate-lock\") pod \"nmstate-handler-sj7f5\" (UID: \"64c0f9fe-4e93-4135-bd8a-88e659d417d9\") " pod="openshift-nmstate/nmstate-handler-sj7f5" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.758507 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/64c0f9fe-4e93-4135-bd8a-88e659d417d9-ovs-socket\") pod \"nmstate-handler-sj7f5\" (UID: \"64c0f9fe-4e93-4135-bd8a-88e659d417d9\") " pod="openshift-nmstate/nmstate-handler-sj7f5" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.758591 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/64c0f9fe-4e93-4135-bd8a-88e659d417d9-dbus-socket\") pod \"nmstate-handler-sj7f5\" (UID: \"64c0f9fe-4e93-4135-bd8a-88e659d417d9\") " pod="openshift-nmstate/nmstate-handler-sj7f5" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.759533 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5e6d05f8-65b8-49af-a844-b8bac61552c2-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-ht2qx\" (UID: \"5e6d05f8-65b8-49af-a844-b8bac61552c2\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ht2qx" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.761622 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/5e6d05f8-65b8-49af-a844-b8bac61552c2-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-ht2qx\" (UID: \"5e6d05f8-65b8-49af-a844-b8bac61552c2\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ht2qx" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.775094 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkxhp\" (UniqueName: \"kubernetes.io/projected/5e6d05f8-65b8-49af-a844-b8bac61552c2-kube-api-access-tkxhp\") pod \"nmstate-console-plugin-7754f76f8b-ht2qx\" (UID: \"5e6d05f8-65b8-49af-a844-b8bac61552c2\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ht2qx" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.778087 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slq2c\" (UniqueName: \"kubernetes.io/projected/64c0f9fe-4e93-4135-bd8a-88e659d417d9-kube-api-access-slq2c\") pod \"nmstate-handler-sj7f5\" (UID: \"64c0f9fe-4e93-4135-bd8a-88e659d417d9\") " pod="openshift-nmstate/nmstate-handler-sj7f5" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.826378 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-z6gz8" Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.826425 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-sj7f5" Feb 02 22:47:28 crc kubenswrapper[4755]: W0202 22:47:28.866888 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod64c0f9fe_4e93_4135_bd8a_88e659d417d9.slice/crio-5ca8f48be45354544061c9ab77ae312b2d3306cf41f925331f44fc40fe2ec841 WatchSource:0}: Error finding container 5ca8f48be45354544061c9ab77ae312b2d3306cf41f925331f44fc40fe2ec841: Status 404 returned error can't find the container with id 5ca8f48be45354544061c9ab77ae312b2d3306cf41f925331f44fc40fe2ec841 Feb 02 22:47:28 crc kubenswrapper[4755]: I0202 22:47:28.877074 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ht2qx" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.007585 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-858d8647f5-gpgwq"] Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.008548 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.024144 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-858d8647f5-gpgwq"] Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.068339 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/8489d677-75be-48e2-a963-0e4117f5ca06-console-serving-cert\") pod \"console-858d8647f5-gpgwq\" (UID: \"8489d677-75be-48e2-a963-0e4117f5ca06\") " pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.068391 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8489d677-75be-48e2-a963-0e4117f5ca06-trusted-ca-bundle\") pod \"console-858d8647f5-gpgwq\" (UID: \"8489d677-75be-48e2-a963-0e4117f5ca06\") " pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.068433 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/bea73b39-f5c5-4290-a4f5-c1338552023f-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-xtzkv\" (UID: \"bea73b39-f5c5-4290-a4f5-c1338552023f\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xtzkv" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.068459 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2pn8\" (UniqueName: \"kubernetes.io/projected/8489d677-75be-48e2-a963-0e4117f5ca06-kube-api-access-s2pn8\") pod \"console-858d8647f5-gpgwq\" (UID: \"8489d677-75be-48e2-a963-0e4117f5ca06\") " pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.068480 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/8489d677-75be-48e2-a963-0e4117f5ca06-console-config\") pod \"console-858d8647f5-gpgwq\" (UID: \"8489d677-75be-48e2-a963-0e4117f5ca06\") " pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.068499 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/8489d677-75be-48e2-a963-0e4117f5ca06-oauth-serving-cert\") pod \"console-858d8647f5-gpgwq\" (UID: \"8489d677-75be-48e2-a963-0e4117f5ca06\") " pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.068522 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/8489d677-75be-48e2-a963-0e4117f5ca06-console-oauth-config\") pod \"console-858d8647f5-gpgwq\" (UID: \"8489d677-75be-48e2-a963-0e4117f5ca06\") " pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.068553 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8489d677-75be-48e2-a963-0e4117f5ca06-service-ca\") pod \"console-858d8647f5-gpgwq\" (UID: \"8489d677-75be-48e2-a963-0e4117f5ca06\") " pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.073094 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/bea73b39-f5c5-4290-a4f5-c1338552023f-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-xtzkv\" (UID: \"bea73b39-f5c5-4290-a4f5-c1338552023f\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xtzkv" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.083945 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xtzkv" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.156374 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-z6gz8"] Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.169481 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2pn8\" (UniqueName: \"kubernetes.io/projected/8489d677-75be-48e2-a963-0e4117f5ca06-kube-api-access-s2pn8\") pod \"console-858d8647f5-gpgwq\" (UID: \"8489d677-75be-48e2-a963-0e4117f5ca06\") " pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.169525 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/8489d677-75be-48e2-a963-0e4117f5ca06-console-config\") pod \"console-858d8647f5-gpgwq\" (UID: \"8489d677-75be-48e2-a963-0e4117f5ca06\") " pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.169547 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/8489d677-75be-48e2-a963-0e4117f5ca06-oauth-serving-cert\") pod \"console-858d8647f5-gpgwq\" (UID: \"8489d677-75be-48e2-a963-0e4117f5ca06\") " pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.169573 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/8489d677-75be-48e2-a963-0e4117f5ca06-console-oauth-config\") pod \"console-858d8647f5-gpgwq\" (UID: \"8489d677-75be-48e2-a963-0e4117f5ca06\") " pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.171693 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/8489d677-75be-48e2-a963-0e4117f5ca06-oauth-serving-cert\") pod \"console-858d8647f5-gpgwq\" (UID: \"8489d677-75be-48e2-a963-0e4117f5ca06\") " pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.172063 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8489d677-75be-48e2-a963-0e4117f5ca06-service-ca\") pod \"console-858d8647f5-gpgwq\" (UID: \"8489d677-75be-48e2-a963-0e4117f5ca06\") " pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.172306 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/8489d677-75be-48e2-a963-0e4117f5ca06-console-serving-cert\") pod \"console-858d8647f5-gpgwq\" (UID: \"8489d677-75be-48e2-a963-0e4117f5ca06\") " pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.172354 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8489d677-75be-48e2-a963-0e4117f5ca06-trusted-ca-bundle\") pod \"console-858d8647f5-gpgwq\" (UID: \"8489d677-75be-48e2-a963-0e4117f5ca06\") " pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:29 crc kubenswrapper[4755]: W0202 22:47:29.174488 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod329545ba_b43d_4600_bc08_84159813a2e4.slice/crio-24ad736ee2e0b215364274bef2a49c763abeb699c6e48e19c4cb3b78fdc67e56 WatchSource:0}: Error finding container 24ad736ee2e0b215364274bef2a49c763abeb699c6e48e19c4cb3b78fdc67e56: Status 404 returned error can't find the container with id 24ad736ee2e0b215364274bef2a49c763abeb699c6e48e19c4cb3b78fdc67e56 Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.177292 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/8489d677-75be-48e2-a963-0e4117f5ca06-console-config\") pod \"console-858d8647f5-gpgwq\" (UID: \"8489d677-75be-48e2-a963-0e4117f5ca06\") " pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.190493 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8489d677-75be-48e2-a963-0e4117f5ca06-service-ca\") pod \"console-858d8647f5-gpgwq\" (UID: \"8489d677-75be-48e2-a963-0e4117f5ca06\") " pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.192678 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8489d677-75be-48e2-a963-0e4117f5ca06-trusted-ca-bundle\") pod \"console-858d8647f5-gpgwq\" (UID: \"8489d677-75be-48e2-a963-0e4117f5ca06\") " pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.193443 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/8489d677-75be-48e2-a963-0e4117f5ca06-console-oauth-config\") pod \"console-858d8647f5-gpgwq\" (UID: \"8489d677-75be-48e2-a963-0e4117f5ca06\") " pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 
22:47:29.193949 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/8489d677-75be-48e2-a963-0e4117f5ca06-console-serving-cert\") pod \"console-858d8647f5-gpgwq\" (UID: \"8489d677-75be-48e2-a963-0e4117f5ca06\") " pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.194615 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2pn8\" (UniqueName: \"kubernetes.io/projected/8489d677-75be-48e2-a963-0e4117f5ca06-kube-api-access-s2pn8\") pod \"console-858d8647f5-gpgwq\" (UID: \"8489d677-75be-48e2-a963-0e4117f5ca06\") " pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.233750 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-ht2qx"] Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.287802 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-xtzkv"] Feb 02 22:47:29 crc kubenswrapper[4755]: W0202 22:47:29.292308 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbea73b39_f5c5_4290_a4f5_c1338552023f.slice/crio-744c1b7a87bbac127413ff6df5b054d0a58914e3dab643769240e5beafc0ae7e WatchSource:0}: Error finding container 744c1b7a87bbac127413ff6df5b054d0a58914e3dab643769240e5beafc0ae7e: Status 404 returned error can't find the container with id 744c1b7a87bbac127413ff6df5b054d0a58914e3dab643769240e5beafc0ae7e Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.335028 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.343825 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ht2qx" event={"ID":"5e6d05f8-65b8-49af-a844-b8bac61552c2","Type":"ContainerStarted","Data":"7ae7a6f6370a655dcf12a1c8e7307df84f262b1345e46cc0b7088b8635dc1015"} Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.344577 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xtzkv" event={"ID":"bea73b39-f5c5-4290-a4f5-c1338552023f","Type":"ContainerStarted","Data":"744c1b7a87bbac127413ff6df5b054d0a58914e3dab643769240e5beafc0ae7e"} Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.345511 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-sj7f5" event={"ID":"64c0f9fe-4e93-4135-bd8a-88e659d417d9","Type":"ContainerStarted","Data":"5ca8f48be45354544061c9ab77ae312b2d3306cf41f925331f44fc40fe2ec841"} Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.346259 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-z6gz8" event={"ID":"329545ba-b43d-4600-bc08-84159813a2e4","Type":"ContainerStarted","Data":"24ad736ee2e0b215364274bef2a49c763abeb699c6e48e19c4cb3b78fdc67e56"} Feb 02 22:47:29 crc kubenswrapper[4755]: I0202 22:47:29.528371 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-858d8647f5-gpgwq"] Feb 02 22:47:29 crc kubenswrapper[4755]: W0202 22:47:29.535152 4755 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8489d677_75be_48e2_a963_0e4117f5ca06.slice/crio-595543bca5c39d9e46dcf2e1528640e178a6cb3d98f41c345f6559330684d4c7 WatchSource:0}: Error finding container 595543bca5c39d9e46dcf2e1528640e178a6cb3d98f41c345f6559330684d4c7: Status 404 returned error can't find the container with id 595543bca5c39d9e46dcf2e1528640e178a6cb3d98f41c345f6559330684d4c7 Feb 02 22:47:30 crc kubenswrapper[4755]: I0202 22:47:30.354938 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-858d8647f5-gpgwq" event={"ID":"8489d677-75be-48e2-a963-0e4117f5ca06","Type":"ContainerStarted","Data":"69f9f84e504e6a3c422b4df52ac4c62e47aced96854ceb2e62a6a82a14c96d92"} Feb 02 22:47:30 crc kubenswrapper[4755]: I0202 22:47:30.355247 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-858d8647f5-gpgwq" event={"ID":"8489d677-75be-48e2-a963-0e4117f5ca06","Type":"ContainerStarted","Data":"595543bca5c39d9e46dcf2e1528640e178a6cb3d98f41c345f6559330684d4c7"} Feb 02 22:47:30 crc kubenswrapper[4755]: I0202 22:47:30.372022 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-858d8647f5-gpgwq" podStartSLOduration=2.372008165 podStartE2EDuration="2.372008165s" podCreationTimestamp="2026-02-02 22:47:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:47:30.369581466 +0000 UTC m=+806.060801792" watchObservedRunningTime="2026-02-02 22:47:30.372008165 +0000 UTC m=+806.063228481" Feb 02 22:47:32 crc kubenswrapper[4755]: I0202 22:47:32.393320 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-z6gz8" event={"ID":"329545ba-b43d-4600-bc08-84159813a2e4","Type":"ContainerStarted","Data":"388182ffd0fa382f4edc1178fa7d2ef9255beebace997ad64489c96051619bfc"} Feb 02 22:47:32 crc kubenswrapper[4755]: I0202 22:47:32.401915 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xtzkv" event={"ID":"bea73b39-f5c5-4290-a4f5-c1338552023f","Type":"ContainerStarted","Data":"38e674d694b2716fb9dad558fa6b0006a02c1e1e257273dc8077ac99dff802c0"} Feb 02 22:47:32 crc kubenswrapper[4755]: I0202 22:47:32.402886 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xtzkv" Feb 02 22:47:32 crc kubenswrapper[4755]: I0202 22:47:32.420569 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xtzkv" podStartSLOduration=1.5494400480000001 podStartE2EDuration="4.420543237s" podCreationTimestamp="2026-02-02 22:47:28 +0000 UTC" firstStartedPulling="2026-02-02 22:47:29.294984176 +0000 UTC m=+804.986204502" lastFinishedPulling="2026-02-02 22:47:32.166087335 +0000 UTC m=+807.857307691" observedRunningTime="2026-02-02 22:47:32.416141903 +0000 UTC m=+808.107362239" watchObservedRunningTime="2026-02-02 22:47:32.420543237 +0000 UTC m=+808.111763583" Feb 02 22:47:33 crc kubenswrapper[4755]: I0202 22:47:33.425022 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ht2qx" event={"ID":"5e6d05f8-65b8-49af-a844-b8bac61552c2","Type":"ContainerStarted","Data":"335c5ba374ad29d66e3e07c1f01158fbe2531776dd40ef0156e0ed186aaee95f"} Feb 02 22:47:33 crc kubenswrapper[4755]: I0202 22:47:33.429273 4755 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-nmstate/nmstate-handler-sj7f5" event={"ID":"64c0f9fe-4e93-4135-bd8a-88e659d417d9","Type":"ContainerStarted","Data":"8f6ba4a73303ff3950cf3a458715bd4235a3e02d44d24d29decb5c723e1b757a"} Feb 02 22:47:33 crc kubenswrapper[4755]: I0202 22:47:33.456307 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-ht2qx" podStartSLOduration=2.563735919 podStartE2EDuration="5.456282192s" podCreationTimestamp="2026-02-02 22:47:28 +0000 UTC" firstStartedPulling="2026-02-02 22:47:29.239629336 +0000 UTC m=+804.930849652" lastFinishedPulling="2026-02-02 22:47:32.132175609 +0000 UTC m=+807.823395925" observedRunningTime="2026-02-02 22:47:33.455310325 +0000 UTC m=+809.146530681" watchObservedRunningTime="2026-02-02 22:47:33.456282192 +0000 UTC m=+809.147502548" Feb 02 22:47:33 crc kubenswrapper[4755]: I0202 22:47:33.482449 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-sj7f5" podStartSLOduration=2.255655325 podStartE2EDuration="5.482424099s" podCreationTimestamp="2026-02-02 22:47:28 +0000 UTC" firstStartedPulling="2026-02-02 22:47:28.939443274 +0000 UTC m=+804.630663600" lastFinishedPulling="2026-02-02 22:47:32.166212008 +0000 UTC m=+807.857432374" observedRunningTime="2026-02-02 22:47:33.471708387 +0000 UTC m=+809.162928753" watchObservedRunningTime="2026-02-02 22:47:33.482424099 +0000 UTC m=+809.173644455" Feb 02 22:47:33 crc kubenswrapper[4755]: I0202 22:47:33.826742 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-sj7f5" Feb 02 22:47:35 crc kubenswrapper[4755]: I0202 22:47:35.456505 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-z6gz8" event={"ID":"329545ba-b43d-4600-bc08-84159813a2e4","Type":"ContainerStarted","Data":"e8fe1ea7a54070c3783967d2bd2438d2758facf6a8a92b46a258975e97e1e057"} Feb 02 22:47:35 crc kubenswrapper[4755]: I0202 22:47:35.483035 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-54757c584b-z6gz8" podStartSLOduration=1.87869918 podStartE2EDuration="7.48301042s" podCreationTimestamp="2026-02-02 22:47:28 +0000 UTC" firstStartedPulling="2026-02-02 22:47:29.191244292 +0000 UTC m=+804.882464618" lastFinishedPulling="2026-02-02 22:47:34.795555522 +0000 UTC m=+810.486775858" observedRunningTime="2026-02-02 22:47:35.481407494 +0000 UTC m=+811.172627850" watchObservedRunningTime="2026-02-02 22:47:35.48301042 +0000 UTC m=+811.174230786" Feb 02 22:47:36 crc kubenswrapper[4755]: I0202 22:47:36.051363 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zbfzw" Feb 02 22:47:36 crc kubenswrapper[4755]: I0202 22:47:36.152021 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zbfzw" Feb 02 22:47:36 crc kubenswrapper[4755]: I0202 22:47:36.295203 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zbfzw"] Feb 02 22:47:37 crc kubenswrapper[4755]: I0202 22:47:37.485041 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zbfzw" podUID="51defda8-7ef7-454f-be74-fbd8596bb91c" containerName="registry-server" containerID="cri-o://516f6cf6f8e23de6501273a8707e16ae56d3ad7e17916ee5fb33f61a06ca2f93" gracePeriod=2 Feb 02 22:47:37 crc 
kubenswrapper[4755]: I0202 22:47:37.917641 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zbfzw" Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.000954 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51defda8-7ef7-454f-be74-fbd8596bb91c-utilities\") pod \"51defda8-7ef7-454f-be74-fbd8596bb91c\" (UID: \"51defda8-7ef7-454f-be74-fbd8596bb91c\") " Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.001055 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8jxs\" (UniqueName: \"kubernetes.io/projected/51defda8-7ef7-454f-be74-fbd8596bb91c-kube-api-access-x8jxs\") pod \"51defda8-7ef7-454f-be74-fbd8596bb91c\" (UID: \"51defda8-7ef7-454f-be74-fbd8596bb91c\") " Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.001149 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51defda8-7ef7-454f-be74-fbd8596bb91c-catalog-content\") pod \"51defda8-7ef7-454f-be74-fbd8596bb91c\" (UID: \"51defda8-7ef7-454f-be74-fbd8596bb91c\") " Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.002868 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51defda8-7ef7-454f-be74-fbd8596bb91c-utilities" (OuterVolumeSpecName: "utilities") pod "51defda8-7ef7-454f-be74-fbd8596bb91c" (UID: "51defda8-7ef7-454f-be74-fbd8596bb91c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.006207 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51defda8-7ef7-454f-be74-fbd8596bb91c-kube-api-access-x8jxs" (OuterVolumeSpecName: "kube-api-access-x8jxs") pod "51defda8-7ef7-454f-be74-fbd8596bb91c" (UID: "51defda8-7ef7-454f-be74-fbd8596bb91c"). InnerVolumeSpecName "kube-api-access-x8jxs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.102409 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8jxs\" (UniqueName: \"kubernetes.io/projected/51defda8-7ef7-454f-be74-fbd8596bb91c-kube-api-access-x8jxs\") on node \"crc\" DevicePath \"\"" Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.102704 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51defda8-7ef7-454f-be74-fbd8596bb91c-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.142912 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51defda8-7ef7-454f-be74-fbd8596bb91c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "51defda8-7ef7-454f-be74-fbd8596bb91c" (UID: "51defda8-7ef7-454f-be74-fbd8596bb91c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.204155 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51defda8-7ef7-454f-be74-fbd8596bb91c-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.498023 4755 generic.go:334] "Generic (PLEG): container finished" podID="51defda8-7ef7-454f-be74-fbd8596bb91c" containerID="516f6cf6f8e23de6501273a8707e16ae56d3ad7e17916ee5fb33f61a06ca2f93" exitCode=0 Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.498071 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zbfzw" event={"ID":"51defda8-7ef7-454f-be74-fbd8596bb91c","Type":"ContainerDied","Data":"516f6cf6f8e23de6501273a8707e16ae56d3ad7e17916ee5fb33f61a06ca2f93"} Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.498115 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zbfzw" event={"ID":"51defda8-7ef7-454f-be74-fbd8596bb91c","Type":"ContainerDied","Data":"2e92f50bcd4497cde11ee586c23b7d3088e10cca90437697ec63687fc798e5e8"} Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.498134 4755 scope.go:117] "RemoveContainer" containerID="516f6cf6f8e23de6501273a8707e16ae56d3ad7e17916ee5fb33f61a06ca2f93" Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.498165 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zbfzw" Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.521432 4755 scope.go:117] "RemoveContainer" containerID="896100609d854c5a1570416e1b5a3baf3976bdc450dabeb1a138c0320c15b60e" Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.543913 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zbfzw"] Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.551782 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zbfzw"] Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.561340 4755 scope.go:117] "RemoveContainer" containerID="a269de5b07166ac0557aab125422f6918139c3c09128af0cfbabde823d6cb5ae" Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.606019 4755 scope.go:117] "RemoveContainer" containerID="516f6cf6f8e23de6501273a8707e16ae56d3ad7e17916ee5fb33f61a06ca2f93" Feb 02 22:47:38 crc kubenswrapper[4755]: E0202 22:47:38.607318 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"516f6cf6f8e23de6501273a8707e16ae56d3ad7e17916ee5fb33f61a06ca2f93\": container with ID starting with 516f6cf6f8e23de6501273a8707e16ae56d3ad7e17916ee5fb33f61a06ca2f93 not found: ID does not exist" containerID="516f6cf6f8e23de6501273a8707e16ae56d3ad7e17916ee5fb33f61a06ca2f93" Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.607373 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"516f6cf6f8e23de6501273a8707e16ae56d3ad7e17916ee5fb33f61a06ca2f93"} err="failed to get container status \"516f6cf6f8e23de6501273a8707e16ae56d3ad7e17916ee5fb33f61a06ca2f93\": rpc error: code = NotFound desc = could not find container \"516f6cf6f8e23de6501273a8707e16ae56d3ad7e17916ee5fb33f61a06ca2f93\": container with ID starting with 516f6cf6f8e23de6501273a8707e16ae56d3ad7e17916ee5fb33f61a06ca2f93 not found: ID does not exist" Feb 02 22:47:38 crc 
kubenswrapper[4755]: I0202 22:47:38.607405 4755 scope.go:117] "RemoveContainer" containerID="896100609d854c5a1570416e1b5a3baf3976bdc450dabeb1a138c0320c15b60e" Feb 02 22:47:38 crc kubenswrapper[4755]: E0202 22:47:38.610193 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"896100609d854c5a1570416e1b5a3baf3976bdc450dabeb1a138c0320c15b60e\": container with ID starting with 896100609d854c5a1570416e1b5a3baf3976bdc450dabeb1a138c0320c15b60e not found: ID does not exist" containerID="896100609d854c5a1570416e1b5a3baf3976bdc450dabeb1a138c0320c15b60e" Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.610234 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"896100609d854c5a1570416e1b5a3baf3976bdc450dabeb1a138c0320c15b60e"} err="failed to get container status \"896100609d854c5a1570416e1b5a3baf3976bdc450dabeb1a138c0320c15b60e\": rpc error: code = NotFound desc = could not find container \"896100609d854c5a1570416e1b5a3baf3976bdc450dabeb1a138c0320c15b60e\": container with ID starting with 896100609d854c5a1570416e1b5a3baf3976bdc450dabeb1a138c0320c15b60e not found: ID does not exist" Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.610263 4755 scope.go:117] "RemoveContainer" containerID="a269de5b07166ac0557aab125422f6918139c3c09128af0cfbabde823d6cb5ae" Feb 02 22:47:38 crc kubenswrapper[4755]: E0202 22:47:38.610863 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a269de5b07166ac0557aab125422f6918139c3c09128af0cfbabde823d6cb5ae\": container with ID starting with a269de5b07166ac0557aab125422f6918139c3c09128af0cfbabde823d6cb5ae not found: ID does not exist" containerID="a269de5b07166ac0557aab125422f6918139c3c09128af0cfbabde823d6cb5ae" Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.610935 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a269de5b07166ac0557aab125422f6918139c3c09128af0cfbabde823d6cb5ae"} err="failed to get container status \"a269de5b07166ac0557aab125422f6918139c3c09128af0cfbabde823d6cb5ae\": rpc error: code = NotFound desc = could not find container \"a269de5b07166ac0557aab125422f6918139c3c09128af0cfbabde823d6cb5ae\": container with ID starting with a269de5b07166ac0557aab125422f6918139c3c09128af0cfbabde823d6cb5ae not found: ID does not exist" Feb 02 22:47:38 crc kubenswrapper[4755]: I0202 22:47:38.856933 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-sj7f5" Feb 02 22:47:39 crc kubenswrapper[4755]: I0202 22:47:39.080645 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51defda8-7ef7-454f-be74-fbd8596bb91c" path="/var/lib/kubelet/pods/51defda8-7ef7-454f-be74-fbd8596bb91c/volumes" Feb 02 22:47:39 crc kubenswrapper[4755]: I0202 22:47:39.335578 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:39 crc kubenswrapper[4755]: I0202 22:47:39.335671 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:39 crc kubenswrapper[4755]: I0202 22:47:39.343456 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:39 crc kubenswrapper[4755]: I0202 22:47:39.513544 4755 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-console/console-858d8647f5-gpgwq" Feb 02 22:47:39 crc kubenswrapper[4755]: I0202 22:47:39.594587 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-rwshx"] Feb 02 22:47:49 crc kubenswrapper[4755]: I0202 22:47:49.095930 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xtzkv" Feb 02 22:48:04 crc kubenswrapper[4755]: I0202 22:48:04.639663 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-rwshx" podUID="0124b915-2ac4-4be7-b356-bf78a8295d9d" containerName="console" containerID="cri-o://b054a350a4956321b512d82c2a40f0148c3cbe1b5a3613f3a6240087df8b4cca" gracePeriod=15 Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.125329 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-rwshx_0124b915-2ac4-4be7-b356-bf78a8295d9d/console/0.log" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.125717 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-rwshx" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.208590 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-oauth-serving-cert\") pod \"0124b915-2ac4-4be7-b356-bf78a8295d9d\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.208636 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-service-ca\") pod \"0124b915-2ac4-4be7-b356-bf78a8295d9d\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.208677 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-console-config\") pod \"0124b915-2ac4-4be7-b356-bf78a8295d9d\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.208713 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q4xfm\" (UniqueName: \"kubernetes.io/projected/0124b915-2ac4-4be7-b356-bf78a8295d9d-kube-api-access-q4xfm\") pod \"0124b915-2ac4-4be7-b356-bf78a8295d9d\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.208746 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-trusted-ca-bundle\") pod \"0124b915-2ac4-4be7-b356-bf78a8295d9d\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.208773 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0124b915-2ac4-4be7-b356-bf78a8295d9d-console-serving-cert\") pod \"0124b915-2ac4-4be7-b356-bf78a8295d9d\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.208793 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/0124b915-2ac4-4be7-b356-bf78a8295d9d-console-oauth-config\") pod \"0124b915-2ac4-4be7-b356-bf78a8295d9d\" (UID: \"0124b915-2ac4-4be7-b356-bf78a8295d9d\") " Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.209467 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-console-config" (OuterVolumeSpecName: "console-config") pod "0124b915-2ac4-4be7-b356-bf78a8295d9d" (UID: "0124b915-2ac4-4be7-b356-bf78a8295d9d"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.209479 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-service-ca" (OuterVolumeSpecName: "service-ca") pod "0124b915-2ac4-4be7-b356-bf78a8295d9d" (UID: "0124b915-2ac4-4be7-b356-bf78a8295d9d"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.209572 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "0124b915-2ac4-4be7-b356-bf78a8295d9d" (UID: "0124b915-2ac4-4be7-b356-bf78a8295d9d"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.209932 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "0124b915-2ac4-4be7-b356-bf78a8295d9d" (UID: "0124b915-2ac4-4be7-b356-bf78a8295d9d"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.214302 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0124b915-2ac4-4be7-b356-bf78a8295d9d-kube-api-access-q4xfm" (OuterVolumeSpecName: "kube-api-access-q4xfm") pod "0124b915-2ac4-4be7-b356-bf78a8295d9d" (UID: "0124b915-2ac4-4be7-b356-bf78a8295d9d"). InnerVolumeSpecName "kube-api-access-q4xfm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.214323 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0124b915-2ac4-4be7-b356-bf78a8295d9d-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "0124b915-2ac4-4be7-b356-bf78a8295d9d" (UID: "0124b915-2ac4-4be7-b356-bf78a8295d9d"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.214777 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0124b915-2ac4-4be7-b356-bf78a8295d9d-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "0124b915-2ac4-4be7-b356-bf78a8295d9d" (UID: "0124b915-2ac4-4be7-b356-bf78a8295d9d"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.310390 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q4xfm\" (UniqueName: \"kubernetes.io/projected/0124b915-2ac4-4be7-b356-bf78a8295d9d-kube-api-access-q4xfm\") on node \"crc\" DevicePath \"\"" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.310418 4755 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.310431 4755 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0124b915-2ac4-4be7-b356-bf78a8295d9d-console-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.310439 4755 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0124b915-2ac4-4be7-b356-bf78a8295d9d-console-oauth-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.310447 4755 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.310455 4755 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.310463 4755 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0124b915-2ac4-4be7-b356-bf78a8295d9d-console-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.586032 4755 scope.go:117] "RemoveContainer" containerID="b054a350a4956321b512d82c2a40f0148c3cbe1b5a3613f3a6240087df8b4cca" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.715130 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-rwshx" event={"ID":"0124b915-2ac4-4be7-b356-bf78a8295d9d","Type":"ContainerDied","Data":"b054a350a4956321b512d82c2a40f0148c3cbe1b5a3613f3a6240087df8b4cca"} Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.715498 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-rwshx" event={"ID":"0124b915-2ac4-4be7-b356-bf78a8295d9d","Type":"ContainerDied","Data":"0e49a6d6b04d60ab110958c19cbec811b81e0d7e3b7c6dc461753113ace371e1"} Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.715147 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-rwshx" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.757423 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-rwshx"] Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.766521 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-rwshx"] Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.781939 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv"] Feb 02 22:48:05 crc kubenswrapper[4755]: E0202 22:48:05.782455 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51defda8-7ef7-454f-be74-fbd8596bb91c" containerName="extract-content" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.782523 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="51defda8-7ef7-454f-be74-fbd8596bb91c" containerName="extract-content" Feb 02 22:48:05 crc kubenswrapper[4755]: E0202 22:48:05.782585 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51defda8-7ef7-454f-be74-fbd8596bb91c" containerName="extract-utilities" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.782635 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="51defda8-7ef7-454f-be74-fbd8596bb91c" containerName="extract-utilities" Feb 02 22:48:05 crc kubenswrapper[4755]: E0202 22:48:05.782685 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51defda8-7ef7-454f-be74-fbd8596bb91c" containerName="registry-server" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.782755 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="51defda8-7ef7-454f-be74-fbd8596bb91c" containerName="registry-server" Feb 02 22:48:05 crc kubenswrapper[4755]: E0202 22:48:05.782820 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0124b915-2ac4-4be7-b356-bf78a8295d9d" containerName="console" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.782878 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="0124b915-2ac4-4be7-b356-bf78a8295d9d" containerName="console" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.783063 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="0124b915-2ac4-4be7-b356-bf78a8295d9d" containerName="console" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.783130 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="51defda8-7ef7-454f-be74-fbd8596bb91c" containerName="registry-server" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.783934 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.786175 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.789695 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv"] Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.920044 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knxz8\" (UniqueName: \"kubernetes.io/projected/8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b-kube-api-access-knxz8\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv\" (UID: \"8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.920172 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv\" (UID: \"8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv" Feb 02 22:48:05 crc kubenswrapper[4755]: I0202 22:48:05.920213 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv\" (UID: \"8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv" Feb 02 22:48:06 crc kubenswrapper[4755]: I0202 22:48:06.022534 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv\" (UID: \"8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv" Feb 02 22:48:06 crc kubenswrapper[4755]: I0202 22:48:06.022641 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knxz8\" (UniqueName: \"kubernetes.io/projected/8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b-kube-api-access-knxz8\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv\" (UID: \"8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv" Feb 02 22:48:06 crc kubenswrapper[4755]: I0202 22:48:06.022784 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv\" (UID: \"8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv" Feb 02 22:48:06 crc kubenswrapper[4755]: I0202 22:48:06.023471 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv\" (UID: \"8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv" Feb 02 22:48:06 crc kubenswrapper[4755]: I0202 22:48:06.024388 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv\" (UID: \"8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv" Feb 02 22:48:06 crc kubenswrapper[4755]: I0202 22:48:06.051695 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knxz8\" (UniqueName: \"kubernetes.io/projected/8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b-kube-api-access-knxz8\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv\" (UID: \"8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv" Feb 02 22:48:06 crc kubenswrapper[4755]: I0202 22:48:06.099422 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv" Feb 02 22:48:06 crc kubenswrapper[4755]: I0202 22:48:06.549858 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv"] Feb 02 22:48:06 crc kubenswrapper[4755]: W0202 22:48:06.565531 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8ef6a0bb_70aa_4908_b7fe_2ea6c8e13e9b.slice/crio-257fea1e377764950459718a6bb1be476115c1fe6a829b175db499ef3d58232a WatchSource:0}: Error finding container 257fea1e377764950459718a6bb1be476115c1fe6a829b175db499ef3d58232a: Status 404 returned error can't find the container with id 257fea1e377764950459718a6bb1be476115c1fe6a829b175db499ef3d58232a Feb 02 22:48:06 crc kubenswrapper[4755]: I0202 22:48:06.723900 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv" event={"ID":"8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b","Type":"ContainerStarted","Data":"257fea1e377764950459718a6bb1be476115c1fe6a829b175db499ef3d58232a"} Feb 02 22:48:07 crc kubenswrapper[4755]: I0202 22:48:07.083031 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0124b915-2ac4-4be7-b356-bf78a8295d9d" path="/var/lib/kubelet/pods/0124b915-2ac4-4be7-b356-bf78a8295d9d/volumes" Feb 02 22:48:07 crc kubenswrapper[4755]: I0202 22:48:07.735003 4755 generic.go:334] "Generic (PLEG): container finished" podID="8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b" containerID="929f15be7c5148681e563ad4b0c508058eb6e752165a792e5a2b5116ea22630c" exitCode=0 Feb 02 22:48:07 crc kubenswrapper[4755]: I0202 22:48:07.735091 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv" event={"ID":"8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b","Type":"ContainerDied","Data":"929f15be7c5148681e563ad4b0c508058eb6e752165a792e5a2b5116ea22630c"} Feb 02 22:48:09 crc kubenswrapper[4755]: I0202 22:48:09.768835 4755 generic.go:334] "Generic (PLEG): container finished" 
podID="8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b" containerID="424ebb1f01f1952e495ff0f12e32e19b1a00a11baf1b604dd4e2fba902594c0b" exitCode=0 Feb 02 22:48:09 crc kubenswrapper[4755]: I0202 22:48:09.768932 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv" event={"ID":"8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b","Type":"ContainerDied","Data":"424ebb1f01f1952e495ff0f12e32e19b1a00a11baf1b604dd4e2fba902594c0b"} Feb 02 22:48:10 crc kubenswrapper[4755]: I0202 22:48:10.776742 4755 generic.go:334] "Generic (PLEG): container finished" podID="8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b" containerID="7902c2df0c50a4aa0865022cea9f15101a4c88f9a4bc7d97806a2cc3f0bf1018" exitCode=0 Feb 02 22:48:10 crc kubenswrapper[4755]: I0202 22:48:10.776788 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv" event={"ID":"8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b","Type":"ContainerDied","Data":"7902c2df0c50a4aa0865022cea9f15101a4c88f9a4bc7d97806a2cc3f0bf1018"} Feb 02 22:48:12 crc kubenswrapper[4755]: I0202 22:48:12.049480 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv" Feb 02 22:48:12 crc kubenswrapper[4755]: I0202 22:48:12.208826 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b-bundle\") pod \"8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b\" (UID: \"8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b\") " Feb 02 22:48:12 crc kubenswrapper[4755]: I0202 22:48:12.208916 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-knxz8\" (UniqueName: \"kubernetes.io/projected/8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b-kube-api-access-knxz8\") pod \"8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b\" (UID: \"8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b\") " Feb 02 22:48:12 crc kubenswrapper[4755]: I0202 22:48:12.208968 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b-util\") pod \"8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b\" (UID: \"8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b\") " Feb 02 22:48:12 crc kubenswrapper[4755]: I0202 22:48:12.210469 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b-bundle" (OuterVolumeSpecName: "bundle") pod "8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b" (UID: "8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:48:12 crc kubenswrapper[4755]: I0202 22:48:12.217608 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b-kube-api-access-knxz8" (OuterVolumeSpecName: "kube-api-access-knxz8") pod "8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b" (UID: "8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b"). InnerVolumeSpecName "kube-api-access-knxz8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:48:12 crc kubenswrapper[4755]: I0202 22:48:12.235638 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b-util" (OuterVolumeSpecName: "util") pod "8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b" (UID: "8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:48:12 crc kubenswrapper[4755]: I0202 22:48:12.310329 4755 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:48:12 crc kubenswrapper[4755]: I0202 22:48:12.310383 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-knxz8\" (UniqueName: \"kubernetes.io/projected/8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b-kube-api-access-knxz8\") on node \"crc\" DevicePath \"\"" Feb 02 22:48:12 crc kubenswrapper[4755]: I0202 22:48:12.310404 4755 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b-util\") on node \"crc\" DevicePath \"\"" Feb 02 22:48:12 crc kubenswrapper[4755]: I0202 22:48:12.795908 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv" event={"ID":"8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b","Type":"ContainerDied","Data":"257fea1e377764950459718a6bb1be476115c1fe6a829b175db499ef3d58232a"} Feb 02 22:48:12 crc kubenswrapper[4755]: I0202 22:48:12.795952 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="257fea1e377764950459718a6bb1be476115c1fe6a829b175db499ef3d58232a" Feb 02 22:48:12 crc kubenswrapper[4755]: I0202 22:48:12.796107 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.611280 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-f4d7f4757-x7zlf"] Feb 02 22:48:20 crc kubenswrapper[4755]: E0202 22:48:20.612144 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b" containerName="util" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.612158 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b" containerName="util" Feb 02 22:48:20 crc kubenswrapper[4755]: E0202 22:48:20.612172 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b" containerName="extract" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.612178 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b" containerName="extract" Feb 02 22:48:20 crc kubenswrapper[4755]: E0202 22:48:20.612186 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b" containerName="pull" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.612196 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b" containerName="pull" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.612318 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b" containerName="extract" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.612819 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-f4d7f4757-x7zlf" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.615021 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.615888 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-gfk4p" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.616334 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.617045 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.617467 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.628363 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-f4d7f4757-x7zlf"] Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.720709 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgjnt\" (UniqueName: \"kubernetes.io/projected/eae78a10-5c8c-4917-a954-fc548de08005-kube-api-access-wgjnt\") pod \"metallb-operator-controller-manager-f4d7f4757-x7zlf\" (UID: \"eae78a10-5c8c-4917-a954-fc548de08005\") " pod="metallb-system/metallb-operator-controller-manager-f4d7f4757-x7zlf" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.720786 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eae78a10-5c8c-4917-a954-fc548de08005-apiservice-cert\") pod \"metallb-operator-controller-manager-f4d7f4757-x7zlf\" (UID: \"eae78a10-5c8c-4917-a954-fc548de08005\") " pod="metallb-system/metallb-operator-controller-manager-f4d7f4757-x7zlf" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.720824 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eae78a10-5c8c-4917-a954-fc548de08005-webhook-cert\") pod \"metallb-operator-controller-manager-f4d7f4757-x7zlf\" (UID: \"eae78a10-5c8c-4917-a954-fc548de08005\") " pod="metallb-system/metallb-operator-controller-manager-f4d7f4757-x7zlf" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.821785 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgjnt\" (UniqueName: \"kubernetes.io/projected/eae78a10-5c8c-4917-a954-fc548de08005-kube-api-access-wgjnt\") pod \"metallb-operator-controller-manager-f4d7f4757-x7zlf\" (UID: \"eae78a10-5c8c-4917-a954-fc548de08005\") " pod="metallb-system/metallb-operator-controller-manager-f4d7f4757-x7zlf" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.822034 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eae78a10-5c8c-4917-a954-fc548de08005-apiservice-cert\") pod \"metallb-operator-controller-manager-f4d7f4757-x7zlf\" (UID: \"eae78a10-5c8c-4917-a954-fc548de08005\") " pod="metallb-system/metallb-operator-controller-manager-f4d7f4757-x7zlf" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.822062 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eae78a10-5c8c-4917-a954-fc548de08005-webhook-cert\") pod \"metallb-operator-controller-manager-f4d7f4757-x7zlf\" (UID: \"eae78a10-5c8c-4917-a954-fc548de08005\") " pod="metallb-system/metallb-operator-controller-manager-f4d7f4757-x7zlf" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.827634 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eae78a10-5c8c-4917-a954-fc548de08005-webhook-cert\") pod \"metallb-operator-controller-manager-f4d7f4757-x7zlf\" (UID: \"eae78a10-5c8c-4917-a954-fc548de08005\") " pod="metallb-system/metallb-operator-controller-manager-f4d7f4757-x7zlf" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.841666 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eae78a10-5c8c-4917-a954-fc548de08005-apiservice-cert\") pod \"metallb-operator-controller-manager-f4d7f4757-x7zlf\" (UID: \"eae78a10-5c8c-4917-a954-fc548de08005\") " pod="metallb-system/metallb-operator-controller-manager-f4d7f4757-x7zlf" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.845593 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgjnt\" (UniqueName: \"kubernetes.io/projected/eae78a10-5c8c-4917-a954-fc548de08005-kube-api-access-wgjnt\") pod \"metallb-operator-controller-manager-f4d7f4757-x7zlf\" (UID: \"eae78a10-5c8c-4917-a954-fc548de08005\") " pod="metallb-system/metallb-operator-controller-manager-f4d7f4757-x7zlf" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.951560 4755 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["metallb-system/metallb-operator-webhook-server-65d88c686d-p24pr"] Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.952298 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-65d88c686d-p24pr" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.954559 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.955042 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.955461 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-l5xk8" Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.963922 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-65d88c686d-p24pr"] Feb 02 22:48:20 crc kubenswrapper[4755]: I0202 22:48:20.981045 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-f4d7f4757-x7zlf" Feb 02 22:48:21 crc kubenswrapper[4755]: I0202 22:48:21.125460 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/599e170d-dc41-4c07-978b-616ce79d338d-apiservice-cert\") pod \"metallb-operator-webhook-server-65d88c686d-p24pr\" (UID: \"599e170d-dc41-4c07-978b-616ce79d338d\") " pod="metallb-system/metallb-operator-webhook-server-65d88c686d-p24pr" Feb 02 22:48:21 crc kubenswrapper[4755]: I0202 22:48:21.125853 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/599e170d-dc41-4c07-978b-616ce79d338d-webhook-cert\") pod \"metallb-operator-webhook-server-65d88c686d-p24pr\" (UID: \"599e170d-dc41-4c07-978b-616ce79d338d\") " pod="metallb-system/metallb-operator-webhook-server-65d88c686d-p24pr" Feb 02 22:48:21 crc kubenswrapper[4755]: I0202 22:48:21.125883 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxv7b\" (UniqueName: \"kubernetes.io/projected/599e170d-dc41-4c07-978b-616ce79d338d-kube-api-access-hxv7b\") pod \"metallb-operator-webhook-server-65d88c686d-p24pr\" (UID: \"599e170d-dc41-4c07-978b-616ce79d338d\") " pod="metallb-system/metallb-operator-webhook-server-65d88c686d-p24pr" Feb 02 22:48:21 crc kubenswrapper[4755]: I0202 22:48:21.228189 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/599e170d-dc41-4c07-978b-616ce79d338d-apiservice-cert\") pod \"metallb-operator-webhook-server-65d88c686d-p24pr\" (UID: \"599e170d-dc41-4c07-978b-616ce79d338d\") " pod="metallb-system/metallb-operator-webhook-server-65d88c686d-p24pr" Feb 02 22:48:21 crc kubenswrapper[4755]: I0202 22:48:21.228331 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/599e170d-dc41-4c07-978b-616ce79d338d-webhook-cert\") pod \"metallb-operator-webhook-server-65d88c686d-p24pr\" (UID: \"599e170d-dc41-4c07-978b-616ce79d338d\") " pod="metallb-system/metallb-operator-webhook-server-65d88c686d-p24pr" Feb 02 22:48:21 crc kubenswrapper[4755]: I0202 22:48:21.228363 4755 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-hxv7b\" (UniqueName: \"kubernetes.io/projected/599e170d-dc41-4c07-978b-616ce79d338d-kube-api-access-hxv7b\") pod \"metallb-operator-webhook-server-65d88c686d-p24pr\" (UID: \"599e170d-dc41-4c07-978b-616ce79d338d\") " pod="metallb-system/metallb-operator-webhook-server-65d88c686d-p24pr" Feb 02 22:48:21 crc kubenswrapper[4755]: I0202 22:48:21.233558 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/599e170d-dc41-4c07-978b-616ce79d338d-webhook-cert\") pod \"metallb-operator-webhook-server-65d88c686d-p24pr\" (UID: \"599e170d-dc41-4c07-978b-616ce79d338d\") " pod="metallb-system/metallb-operator-webhook-server-65d88c686d-p24pr" Feb 02 22:48:21 crc kubenswrapper[4755]: I0202 22:48:21.235974 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/599e170d-dc41-4c07-978b-616ce79d338d-apiservice-cert\") pod \"metallb-operator-webhook-server-65d88c686d-p24pr\" (UID: \"599e170d-dc41-4c07-978b-616ce79d338d\") " pod="metallb-system/metallb-operator-webhook-server-65d88c686d-p24pr" Feb 02 22:48:21 crc kubenswrapper[4755]: I0202 22:48:21.248904 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxv7b\" (UniqueName: \"kubernetes.io/projected/599e170d-dc41-4c07-978b-616ce79d338d-kube-api-access-hxv7b\") pod \"metallb-operator-webhook-server-65d88c686d-p24pr\" (UID: \"599e170d-dc41-4c07-978b-616ce79d338d\") " pod="metallb-system/metallb-operator-webhook-server-65d88c686d-p24pr" Feb 02 22:48:21 crc kubenswrapper[4755]: I0202 22:48:21.265908 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-65d88c686d-p24pr" Feb 02 22:48:21 crc kubenswrapper[4755]: I0202 22:48:21.389816 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-f4d7f4757-x7zlf"] Feb 02 22:48:21 crc kubenswrapper[4755]: I0202 22:48:21.724557 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-65d88c686d-p24pr"] Feb 02 22:48:21 crc kubenswrapper[4755]: W0202 22:48:21.737266 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod599e170d_dc41_4c07_978b_616ce79d338d.slice/crio-4c84531e12c4973a972777803b8a691e5faf0fd6a6eb6ab5f72ba117e6d191db WatchSource:0}: Error finding container 4c84531e12c4973a972777803b8a691e5faf0fd6a6eb6ab5f72ba117e6d191db: Status 404 returned error can't find the container with id 4c84531e12c4973a972777803b8a691e5faf0fd6a6eb6ab5f72ba117e6d191db Feb 02 22:48:21 crc kubenswrapper[4755]: I0202 22:48:21.850161 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-65d88c686d-p24pr" event={"ID":"599e170d-dc41-4c07-978b-616ce79d338d","Type":"ContainerStarted","Data":"4c84531e12c4973a972777803b8a691e5faf0fd6a6eb6ab5f72ba117e6d191db"} Feb 02 22:48:21 crc kubenswrapper[4755]: I0202 22:48:21.851065 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-f4d7f4757-x7zlf" event={"ID":"eae78a10-5c8c-4917-a954-fc548de08005","Type":"ContainerStarted","Data":"98aa412f7d52c55e391175d3f025aceb00e895de2e97676fe1b73552d7b4e52b"} Feb 02 22:48:24 crc kubenswrapper[4755]: I0202 22:48:24.894432 4755 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="metallb-system/metallb-operator-controller-manager-f4d7f4757-x7zlf" event={"ID":"eae78a10-5c8c-4917-a954-fc548de08005","Type":"ContainerStarted","Data":"63c6152579fb7809054bdb9763b08c4252c8fbac86ad890e6dd2ce154b6be84a"} Feb 02 22:48:24 crc kubenswrapper[4755]: I0202 22:48:24.895000 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-f4d7f4757-x7zlf" Feb 02 22:48:24 crc kubenswrapper[4755]: I0202 22:48:24.920150 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-f4d7f4757-x7zlf" podStartSLOduration=1.6911680489999998 podStartE2EDuration="4.920131421s" podCreationTimestamp="2026-02-02 22:48:20 +0000 UTC" firstStartedPulling="2026-02-02 22:48:21.3996857 +0000 UTC m=+857.090906026" lastFinishedPulling="2026-02-02 22:48:24.628649072 +0000 UTC m=+860.319869398" observedRunningTime="2026-02-02 22:48:24.915108541 +0000 UTC m=+860.606328887" watchObservedRunningTime="2026-02-02 22:48:24.920131421 +0000 UTC m=+860.611351757" Feb 02 22:48:26 crc kubenswrapper[4755]: I0202 22:48:26.906152 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-65d88c686d-p24pr" event={"ID":"599e170d-dc41-4c07-978b-616ce79d338d","Type":"ContainerStarted","Data":"41c0e75aae9c2d4c1f8f313f3a5051a1cb16b3e1d47c0163c567746c95931c59"} Feb 02 22:48:26 crc kubenswrapper[4755]: I0202 22:48:26.907359 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-65d88c686d-p24pr" Feb 02 22:48:26 crc kubenswrapper[4755]: I0202 22:48:26.936647 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-65d88c686d-p24pr" podStartSLOduration=2.071014008 podStartE2EDuration="6.936626217s" podCreationTimestamp="2026-02-02 22:48:20 +0000 UTC" firstStartedPulling="2026-02-02 22:48:21.743537537 +0000 UTC m=+857.434757873" lastFinishedPulling="2026-02-02 22:48:26.609149756 +0000 UTC m=+862.300370082" observedRunningTime="2026-02-02 22:48:26.934511198 +0000 UTC m=+862.625731524" watchObservedRunningTime="2026-02-02 22:48:26.936626217 +0000 UTC m=+862.627846543" Feb 02 22:48:41 crc kubenswrapper[4755]: I0202 22:48:41.273645 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-65d88c686d-p24pr" Feb 02 22:48:42 crc kubenswrapper[4755]: I0202 22:48:42.777944 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8m8tk"] Feb 02 22:48:42 crc kubenswrapper[4755]: I0202 22:48:42.779301 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8m8tk" Feb 02 22:48:42 crc kubenswrapper[4755]: I0202 22:48:42.793745 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8m8tk"] Feb 02 22:48:42 crc kubenswrapper[4755]: I0202 22:48:42.940295 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3071ed18-84a7-46aa-acc5-cf551b00b0a8-catalog-content\") pod \"community-operators-8m8tk\" (UID: \"3071ed18-84a7-46aa-acc5-cf551b00b0a8\") " pod="openshift-marketplace/community-operators-8m8tk" Feb 02 22:48:42 crc kubenswrapper[4755]: I0202 22:48:42.940369 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3071ed18-84a7-46aa-acc5-cf551b00b0a8-utilities\") pod \"community-operators-8m8tk\" (UID: \"3071ed18-84a7-46aa-acc5-cf551b00b0a8\") " pod="openshift-marketplace/community-operators-8m8tk" Feb 02 22:48:42 crc kubenswrapper[4755]: I0202 22:48:42.940707 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4bgr\" (UniqueName: \"kubernetes.io/projected/3071ed18-84a7-46aa-acc5-cf551b00b0a8-kube-api-access-h4bgr\") pod \"community-operators-8m8tk\" (UID: \"3071ed18-84a7-46aa-acc5-cf551b00b0a8\") " pod="openshift-marketplace/community-operators-8m8tk" Feb 02 22:48:43 crc kubenswrapper[4755]: I0202 22:48:43.042223 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4bgr\" (UniqueName: \"kubernetes.io/projected/3071ed18-84a7-46aa-acc5-cf551b00b0a8-kube-api-access-h4bgr\") pod \"community-operators-8m8tk\" (UID: \"3071ed18-84a7-46aa-acc5-cf551b00b0a8\") " pod="openshift-marketplace/community-operators-8m8tk" Feb 02 22:48:43 crc kubenswrapper[4755]: I0202 22:48:43.042343 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3071ed18-84a7-46aa-acc5-cf551b00b0a8-catalog-content\") pod \"community-operators-8m8tk\" (UID: \"3071ed18-84a7-46aa-acc5-cf551b00b0a8\") " pod="openshift-marketplace/community-operators-8m8tk" Feb 02 22:48:43 crc kubenswrapper[4755]: I0202 22:48:43.042372 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3071ed18-84a7-46aa-acc5-cf551b00b0a8-utilities\") pod \"community-operators-8m8tk\" (UID: \"3071ed18-84a7-46aa-acc5-cf551b00b0a8\") " pod="openshift-marketplace/community-operators-8m8tk" Feb 02 22:48:43 crc kubenswrapper[4755]: I0202 22:48:43.042953 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3071ed18-84a7-46aa-acc5-cf551b00b0a8-catalog-content\") pod \"community-operators-8m8tk\" (UID: \"3071ed18-84a7-46aa-acc5-cf551b00b0a8\") " pod="openshift-marketplace/community-operators-8m8tk" Feb 02 22:48:43 crc kubenswrapper[4755]: I0202 22:48:43.043101 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3071ed18-84a7-46aa-acc5-cf551b00b0a8-utilities\") pod \"community-operators-8m8tk\" (UID: \"3071ed18-84a7-46aa-acc5-cf551b00b0a8\") " pod="openshift-marketplace/community-operators-8m8tk" Feb 02 22:48:43 crc kubenswrapper[4755]: I0202 22:48:43.063630 4755 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-h4bgr\" (UniqueName: \"kubernetes.io/projected/3071ed18-84a7-46aa-acc5-cf551b00b0a8-kube-api-access-h4bgr\") pod \"community-operators-8m8tk\" (UID: \"3071ed18-84a7-46aa-acc5-cf551b00b0a8\") " pod="openshift-marketplace/community-operators-8m8tk" Feb 02 22:48:43 crc kubenswrapper[4755]: I0202 22:48:43.147923 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8m8tk" Feb 02 22:48:43 crc kubenswrapper[4755]: I0202 22:48:43.604869 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8m8tk"] Feb 02 22:48:43 crc kubenswrapper[4755]: W0202 22:48:43.609900 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3071ed18_84a7_46aa_acc5_cf551b00b0a8.slice/crio-825acacd85155f97480dcc1885129acfe5b5705526e1881b8d0e3f37efada8bc WatchSource:0}: Error finding container 825acacd85155f97480dcc1885129acfe5b5705526e1881b8d0e3f37efada8bc: Status 404 returned error can't find the container with id 825acacd85155f97480dcc1885129acfe5b5705526e1881b8d0e3f37efada8bc Feb 02 22:48:44 crc kubenswrapper[4755]: I0202 22:48:44.032493 4755 generic.go:334] "Generic (PLEG): container finished" podID="3071ed18-84a7-46aa-acc5-cf551b00b0a8" containerID="9fbbc150cdf465018658b1133093653a49aee6e72b36293d0e70dd3da6021042" exitCode=0 Feb 02 22:48:44 crc kubenswrapper[4755]: I0202 22:48:44.032530 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8m8tk" event={"ID":"3071ed18-84a7-46aa-acc5-cf551b00b0a8","Type":"ContainerDied","Data":"9fbbc150cdf465018658b1133093653a49aee6e72b36293d0e70dd3da6021042"} Feb 02 22:48:44 crc kubenswrapper[4755]: I0202 22:48:44.032555 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8m8tk" event={"ID":"3071ed18-84a7-46aa-acc5-cf551b00b0a8","Type":"ContainerStarted","Data":"825acacd85155f97480dcc1885129acfe5b5705526e1881b8d0e3f37efada8bc"} Feb 02 22:48:45 crc kubenswrapper[4755]: I0202 22:48:45.043159 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8m8tk" event={"ID":"3071ed18-84a7-46aa-acc5-cf551b00b0a8","Type":"ContainerStarted","Data":"c0932337f4590c8f1f93b25c31e96fd0ffceefa4e8ee57ab834fc95a569dd459"} Feb 02 22:48:46 crc kubenswrapper[4755]: I0202 22:48:46.053912 4755 generic.go:334] "Generic (PLEG): container finished" podID="3071ed18-84a7-46aa-acc5-cf551b00b0a8" containerID="c0932337f4590c8f1f93b25c31e96fd0ffceefa4e8ee57ab834fc95a569dd459" exitCode=0 Feb 02 22:48:46 crc kubenswrapper[4755]: I0202 22:48:46.054261 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8m8tk" event={"ID":"3071ed18-84a7-46aa-acc5-cf551b00b0a8","Type":"ContainerDied","Data":"c0932337f4590c8f1f93b25c31e96fd0ffceefa4e8ee57ab834fc95a569dd459"} Feb 02 22:48:47 crc kubenswrapper[4755]: I0202 22:48:47.067154 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8m8tk" event={"ID":"3071ed18-84a7-46aa-acc5-cf551b00b0a8","Type":"ContainerStarted","Data":"dedd9891481a327e46c504cf33869284290336eede325d3e0590e61386138f6f"} Feb 02 22:48:47 crc kubenswrapper[4755]: I0202 22:48:47.109296 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8m8tk" 
podStartSLOduration=2.645945303 podStartE2EDuration="5.109273751s" podCreationTimestamp="2026-02-02 22:48:42 +0000 UTC" firstStartedPulling="2026-02-02 22:48:44.036019113 +0000 UTC m=+879.727239439" lastFinishedPulling="2026-02-02 22:48:46.499347531 +0000 UTC m=+882.190567887" observedRunningTime="2026-02-02 22:48:47.100933809 +0000 UTC m=+882.792154195" watchObservedRunningTime="2026-02-02 22:48:47.109273751 +0000 UTC m=+882.800494107" Feb 02 22:48:53 crc kubenswrapper[4755]: I0202 22:48:53.148854 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8m8tk" Feb 02 22:48:53 crc kubenswrapper[4755]: I0202 22:48:53.149548 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8m8tk" Feb 02 22:48:53 crc kubenswrapper[4755]: I0202 22:48:53.226516 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8m8tk" Feb 02 22:48:54 crc kubenswrapper[4755]: I0202 22:48:54.186503 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8m8tk" Feb 02 22:48:54 crc kubenswrapper[4755]: I0202 22:48:54.264363 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8m8tk"] Feb 02 22:48:56 crc kubenswrapper[4755]: I0202 22:48:56.133555 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8m8tk" podUID="3071ed18-84a7-46aa-acc5-cf551b00b0a8" containerName="registry-server" containerID="cri-o://dedd9891481a327e46c504cf33869284290336eede325d3e0590e61386138f6f" gracePeriod=2 Feb 02 22:48:56 crc kubenswrapper[4755]: I0202 22:48:56.567065 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8m8tk" Feb 02 22:48:56 crc kubenswrapper[4755]: I0202 22:48:56.672549 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3071ed18-84a7-46aa-acc5-cf551b00b0a8-utilities\") pod \"3071ed18-84a7-46aa-acc5-cf551b00b0a8\" (UID: \"3071ed18-84a7-46aa-acc5-cf551b00b0a8\") " Feb 02 22:48:56 crc kubenswrapper[4755]: I0202 22:48:56.672668 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3071ed18-84a7-46aa-acc5-cf551b00b0a8-catalog-content\") pod \"3071ed18-84a7-46aa-acc5-cf551b00b0a8\" (UID: \"3071ed18-84a7-46aa-acc5-cf551b00b0a8\") " Feb 02 22:48:56 crc kubenswrapper[4755]: I0202 22:48:56.672697 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4bgr\" (UniqueName: \"kubernetes.io/projected/3071ed18-84a7-46aa-acc5-cf551b00b0a8-kube-api-access-h4bgr\") pod \"3071ed18-84a7-46aa-acc5-cf551b00b0a8\" (UID: \"3071ed18-84a7-46aa-acc5-cf551b00b0a8\") " Feb 02 22:48:56 crc kubenswrapper[4755]: I0202 22:48:56.673555 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3071ed18-84a7-46aa-acc5-cf551b00b0a8-utilities" (OuterVolumeSpecName: "utilities") pod "3071ed18-84a7-46aa-acc5-cf551b00b0a8" (UID: "3071ed18-84a7-46aa-acc5-cf551b00b0a8"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:48:56 crc kubenswrapper[4755]: I0202 22:48:56.677203 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3071ed18-84a7-46aa-acc5-cf551b00b0a8-kube-api-access-h4bgr" (OuterVolumeSpecName: "kube-api-access-h4bgr") pod "3071ed18-84a7-46aa-acc5-cf551b00b0a8" (UID: "3071ed18-84a7-46aa-acc5-cf551b00b0a8"). InnerVolumeSpecName "kube-api-access-h4bgr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:48:56 crc kubenswrapper[4755]: I0202 22:48:56.726795 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3071ed18-84a7-46aa-acc5-cf551b00b0a8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3071ed18-84a7-46aa-acc5-cf551b00b0a8" (UID: "3071ed18-84a7-46aa-acc5-cf551b00b0a8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:48:56 crc kubenswrapper[4755]: I0202 22:48:56.774493 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3071ed18-84a7-46aa-acc5-cf551b00b0a8-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 22:48:56 crc kubenswrapper[4755]: I0202 22:48:56.774546 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3071ed18-84a7-46aa-acc5-cf551b00b0a8-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 22:48:56 crc kubenswrapper[4755]: I0202 22:48:56.774571 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4bgr\" (UniqueName: \"kubernetes.io/projected/3071ed18-84a7-46aa-acc5-cf551b00b0a8-kube-api-access-h4bgr\") on node \"crc\" DevicePath \"\"" Feb 02 22:48:57 crc kubenswrapper[4755]: I0202 22:48:57.146091 4755 generic.go:334] "Generic (PLEG): container finished" podID="3071ed18-84a7-46aa-acc5-cf551b00b0a8" containerID="dedd9891481a327e46c504cf33869284290336eede325d3e0590e61386138f6f" exitCode=0 Feb 02 22:48:57 crc kubenswrapper[4755]: I0202 22:48:57.146133 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8m8tk" event={"ID":"3071ed18-84a7-46aa-acc5-cf551b00b0a8","Type":"ContainerDied","Data":"dedd9891481a327e46c504cf33869284290336eede325d3e0590e61386138f6f"} Feb 02 22:48:57 crc kubenswrapper[4755]: I0202 22:48:57.146162 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8m8tk" event={"ID":"3071ed18-84a7-46aa-acc5-cf551b00b0a8","Type":"ContainerDied","Data":"825acacd85155f97480dcc1885129acfe5b5705526e1881b8d0e3f37efada8bc"} Feb 02 22:48:57 crc kubenswrapper[4755]: I0202 22:48:57.146185 4755 scope.go:117] "RemoveContainer" containerID="dedd9891481a327e46c504cf33869284290336eede325d3e0590e61386138f6f" Feb 02 22:48:57 crc kubenswrapper[4755]: I0202 22:48:57.146206 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8m8tk" Feb 02 22:48:57 crc kubenswrapper[4755]: I0202 22:48:57.180890 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8m8tk"] Feb 02 22:48:57 crc kubenswrapper[4755]: I0202 22:48:57.183143 4755 scope.go:117] "RemoveContainer" containerID="c0932337f4590c8f1f93b25c31e96fd0ffceefa4e8ee57ab834fc95a569dd459" Feb 02 22:48:57 crc kubenswrapper[4755]: I0202 22:48:57.186908 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-8m8tk"] Feb 02 22:48:57 crc kubenswrapper[4755]: I0202 22:48:57.208622 4755 scope.go:117] "RemoveContainer" containerID="9fbbc150cdf465018658b1133093653a49aee6e72b36293d0e70dd3da6021042" Feb 02 22:48:57 crc kubenswrapper[4755]: I0202 22:48:57.229759 4755 scope.go:117] "RemoveContainer" containerID="dedd9891481a327e46c504cf33869284290336eede325d3e0590e61386138f6f" Feb 02 22:48:57 crc kubenswrapper[4755]: E0202 22:48:57.230277 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dedd9891481a327e46c504cf33869284290336eede325d3e0590e61386138f6f\": container with ID starting with dedd9891481a327e46c504cf33869284290336eede325d3e0590e61386138f6f not found: ID does not exist" containerID="dedd9891481a327e46c504cf33869284290336eede325d3e0590e61386138f6f" Feb 02 22:48:57 crc kubenswrapper[4755]: I0202 22:48:57.230330 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dedd9891481a327e46c504cf33869284290336eede325d3e0590e61386138f6f"} err="failed to get container status \"dedd9891481a327e46c504cf33869284290336eede325d3e0590e61386138f6f\": rpc error: code = NotFound desc = could not find container \"dedd9891481a327e46c504cf33869284290336eede325d3e0590e61386138f6f\": container with ID starting with dedd9891481a327e46c504cf33869284290336eede325d3e0590e61386138f6f not found: ID does not exist" Feb 02 22:48:57 crc kubenswrapper[4755]: I0202 22:48:57.230366 4755 scope.go:117] "RemoveContainer" containerID="c0932337f4590c8f1f93b25c31e96fd0ffceefa4e8ee57ab834fc95a569dd459" Feb 02 22:48:57 crc kubenswrapper[4755]: E0202 22:48:57.230939 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0932337f4590c8f1f93b25c31e96fd0ffceefa4e8ee57ab834fc95a569dd459\": container with ID starting with c0932337f4590c8f1f93b25c31e96fd0ffceefa4e8ee57ab834fc95a569dd459 not found: ID does not exist" containerID="c0932337f4590c8f1f93b25c31e96fd0ffceefa4e8ee57ab834fc95a569dd459" Feb 02 22:48:57 crc kubenswrapper[4755]: I0202 22:48:57.230963 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0932337f4590c8f1f93b25c31e96fd0ffceefa4e8ee57ab834fc95a569dd459"} err="failed to get container status \"c0932337f4590c8f1f93b25c31e96fd0ffceefa4e8ee57ab834fc95a569dd459\": rpc error: code = NotFound desc = could not find container \"c0932337f4590c8f1f93b25c31e96fd0ffceefa4e8ee57ab834fc95a569dd459\": container with ID starting with c0932337f4590c8f1f93b25c31e96fd0ffceefa4e8ee57ab834fc95a569dd459 not found: ID does not exist" Feb 02 22:48:57 crc kubenswrapper[4755]: I0202 22:48:57.230983 4755 scope.go:117] "RemoveContainer" containerID="9fbbc150cdf465018658b1133093653a49aee6e72b36293d0e70dd3da6021042" Feb 02 22:48:57 crc kubenswrapper[4755]: E0202 22:48:57.231202 4755 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"9fbbc150cdf465018658b1133093653a49aee6e72b36293d0e70dd3da6021042\": container with ID starting with 9fbbc150cdf465018658b1133093653a49aee6e72b36293d0e70dd3da6021042 not found: ID does not exist" containerID="9fbbc150cdf465018658b1133093653a49aee6e72b36293d0e70dd3da6021042" Feb 02 22:48:57 crc kubenswrapper[4755]: I0202 22:48:57.231233 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9fbbc150cdf465018658b1133093653a49aee6e72b36293d0e70dd3da6021042"} err="failed to get container status \"9fbbc150cdf465018658b1133093653a49aee6e72b36293d0e70dd3da6021042\": rpc error: code = NotFound desc = could not find container \"9fbbc150cdf465018658b1133093653a49aee6e72b36293d0e70dd3da6021042\": container with ID starting with 9fbbc150cdf465018658b1133093653a49aee6e72b36293d0e70dd3da6021042 not found: ID does not exist" Feb 02 22:48:59 crc kubenswrapper[4755]: I0202 22:48:59.081808 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3071ed18-84a7-46aa-acc5-cf551b00b0a8" path="/var/lib/kubelet/pods/3071ed18-84a7-46aa-acc5-cf551b00b0a8/volumes" Feb 02 22:49:00 crc kubenswrapper[4755]: I0202 22:49:00.985157 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-f4d7f4757-x7zlf" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.747073 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-g4qwg"] Feb 02 22:49:01 crc kubenswrapper[4755]: E0202 22:49:01.747367 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3071ed18-84a7-46aa-acc5-cf551b00b0a8" containerName="extract-content" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.747387 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3071ed18-84a7-46aa-acc5-cf551b00b0a8" containerName="extract-content" Feb 02 22:49:01 crc kubenswrapper[4755]: E0202 22:49:01.747402 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3071ed18-84a7-46aa-acc5-cf551b00b0a8" containerName="registry-server" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.747411 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3071ed18-84a7-46aa-acc5-cf551b00b0a8" containerName="registry-server" Feb 02 22:49:01 crc kubenswrapper[4755]: E0202 22:49:01.747432 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3071ed18-84a7-46aa-acc5-cf551b00b0a8" containerName="extract-utilities" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.747440 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3071ed18-84a7-46aa-acc5-cf551b00b0a8" containerName="extract-utilities" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.747588 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="3071ed18-84a7-46aa-acc5-cf551b00b0a8" containerName="registry-server" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.750337 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:01 crc kubenswrapper[4755]: W0202 22:49:01.752620 4755 reflector.go:561] object-"metallb-system"/"frr-k8s-daemon-dockercfg-jm8zf": failed to list *v1.Secret: secrets "frr-k8s-daemon-dockercfg-jm8zf" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "metallb-system": no relationship found between node 'crc' and this object Feb 02 22:49:01 crc kubenswrapper[4755]: E0202 22:49:01.752853 4755 reflector.go:158] "Unhandled Error" err="object-\"metallb-system\"/\"frr-k8s-daemon-dockercfg-jm8zf\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"frr-k8s-daemon-dockercfg-jm8zf\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"metallb-system\": no relationship found between node 'crc' and this object" logger="UnhandledError" Feb 02 22:49:01 crc kubenswrapper[4755]: W0202 22:49:01.753520 4755 reflector.go:561] object-"metallb-system"/"frr-k8s-certs-secret": failed to list *v1.Secret: secrets "frr-k8s-certs-secret" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "metallb-system": no relationship found between node 'crc' and this object Feb 02 22:49:01 crc kubenswrapper[4755]: E0202 22:49:01.753545 4755 reflector.go:158] "Unhandled Error" err="object-\"metallb-system\"/\"frr-k8s-certs-secret\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"frr-k8s-certs-secret\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"metallb-system\": no relationship found between node 'crc' and this object" logger="UnhandledError" Feb 02 22:49:01 crc kubenswrapper[4755]: W0202 22:49:01.753585 4755 reflector.go:561] object-"metallb-system"/"frr-startup": failed to list *v1.ConfigMap: configmaps "frr-startup" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "metallb-system": no relationship found between node 'crc' and this object Feb 02 22:49:01 crc kubenswrapper[4755]: E0202 22:49:01.753597 4755 reflector.go:158] "Unhandled Error" err="object-\"metallb-system\"/\"frr-startup\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"frr-startup\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"metallb-system\": no relationship found between node 'crc' and this object" logger="UnhandledError" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.758876 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-c2x4m"] Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.759800 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-c2x4m" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.762142 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.776457 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-c2x4m"] Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.826058 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-9h74b"] Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.826981 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-9h74b" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.829468 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-5gk9f" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.829709 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.829972 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.829987 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.853267 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6968d8fdc4-v6qqk"] Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.854134 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6968d8fdc4-v6qqk" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.857055 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.857285 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/e86bcc6c-4300-4fcc-8333-902799e386ad-frr-conf\") pod \"frr-k8s-g4qwg\" (UID: \"e86bcc6c-4300-4fcc-8333-902799e386ad\") " pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.857334 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tkxk\" (UniqueName: \"kubernetes.io/projected/e86bcc6c-4300-4fcc-8333-902799e386ad-kube-api-access-4tkxk\") pod \"frr-k8s-g4qwg\" (UID: \"e86bcc6c-4300-4fcc-8333-902799e386ad\") " pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.857368 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/e86bcc6c-4300-4fcc-8333-902799e386ad-frr-startup\") pod \"frr-k8s-g4qwg\" (UID: \"e86bcc6c-4300-4fcc-8333-902799e386ad\") " pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.857432 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/e86bcc6c-4300-4fcc-8333-902799e386ad-reloader\") pod \"frr-k8s-g4qwg\" (UID: \"e86bcc6c-4300-4fcc-8333-902799e386ad\") " pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.857489 
4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/e86bcc6c-4300-4fcc-8333-902799e386ad-frr-sockets\") pod \"frr-k8s-g4qwg\" (UID: \"e86bcc6c-4300-4fcc-8333-902799e386ad\") " pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.857516 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/e86bcc6c-4300-4fcc-8333-902799e386ad-metrics\") pod \"frr-k8s-g4qwg\" (UID: \"e86bcc6c-4300-4fcc-8333-902799e386ad\") " pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.857536 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e86bcc6c-4300-4fcc-8333-902799e386ad-metrics-certs\") pod \"frr-k8s-g4qwg\" (UID: \"e86bcc6c-4300-4fcc-8333-902799e386ad\") " pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.861366 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-v6qqk"] Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.958773 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/65b275de-548e-4eea-bb10-7f32abf4f838-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-c2x4m\" (UID: \"65b275de-548e-4eea-bb10-7f32abf4f838\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-c2x4m" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.958833 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/fa5b9de9-7fe3-4aed-9187-c9660d3f5e38-memberlist\") pod \"speaker-9h74b\" (UID: \"fa5b9de9-7fe3-4aed-9187-c9660d3f5e38\") " pod="metallb-system/speaker-9h74b" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.958856 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/fa5b9de9-7fe3-4aed-9187-c9660d3f5e38-metallb-excludel2\") pod \"speaker-9h74b\" (UID: \"fa5b9de9-7fe3-4aed-9187-c9660d3f5e38\") " pod="metallb-system/speaker-9h74b" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.958888 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b74qf\" (UniqueName: \"kubernetes.io/projected/cb411fd0-384e-434e-82c7-6d42381d016e-kube-api-access-b74qf\") pod \"controller-6968d8fdc4-v6qqk\" (UID: \"cb411fd0-384e-434e-82c7-6d42381d016e\") " pod="metallb-system/controller-6968d8fdc4-v6qqk" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.958931 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/e86bcc6c-4300-4fcc-8333-902799e386ad-reloader\") pod \"frr-k8s-g4qwg\" (UID: \"e86bcc6c-4300-4fcc-8333-902799e386ad\") " pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.958969 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fa5b9de9-7fe3-4aed-9187-c9660d3f5e38-metrics-certs\") pod \"speaker-9h74b\" (UID: \"fa5b9de9-7fe3-4aed-9187-c9660d3f5e38\") " 
pod="metallb-system/speaker-9h74b" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.958993 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/e86bcc6c-4300-4fcc-8333-902799e386ad-frr-sockets\") pod \"frr-k8s-g4qwg\" (UID: \"e86bcc6c-4300-4fcc-8333-902799e386ad\") " pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.959008 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cb411fd0-384e-434e-82c7-6d42381d016e-metrics-certs\") pod \"controller-6968d8fdc4-v6qqk\" (UID: \"cb411fd0-384e-434e-82c7-6d42381d016e\") " pod="metallb-system/controller-6968d8fdc4-v6qqk" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.959025 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8w5b\" (UniqueName: \"kubernetes.io/projected/fa5b9de9-7fe3-4aed-9187-c9660d3f5e38-kube-api-access-j8w5b\") pod \"speaker-9h74b\" (UID: \"fa5b9de9-7fe3-4aed-9187-c9660d3f5e38\") " pod="metallb-system/speaker-9h74b" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.959041 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/e86bcc6c-4300-4fcc-8333-902799e386ad-metrics\") pod \"frr-k8s-g4qwg\" (UID: \"e86bcc6c-4300-4fcc-8333-902799e386ad\") " pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.959056 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e86bcc6c-4300-4fcc-8333-902799e386ad-metrics-certs\") pod \"frr-k8s-g4qwg\" (UID: \"e86bcc6c-4300-4fcc-8333-902799e386ad\") " pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.959073 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cb411fd0-384e-434e-82c7-6d42381d016e-cert\") pod \"controller-6968d8fdc4-v6qqk\" (UID: \"cb411fd0-384e-434e-82c7-6d42381d016e\") " pod="metallb-system/controller-6968d8fdc4-v6qqk" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.959096 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/e86bcc6c-4300-4fcc-8333-902799e386ad-frr-conf\") pod \"frr-k8s-g4qwg\" (UID: \"e86bcc6c-4300-4fcc-8333-902799e386ad\") " pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.959115 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tkxk\" (UniqueName: \"kubernetes.io/projected/e86bcc6c-4300-4fcc-8333-902799e386ad-kube-api-access-4tkxk\") pod \"frr-k8s-g4qwg\" (UID: \"e86bcc6c-4300-4fcc-8333-902799e386ad\") " pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.959131 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jmql\" (UniqueName: \"kubernetes.io/projected/65b275de-548e-4eea-bb10-7f32abf4f838-kube-api-access-2jmql\") pod \"frr-k8s-webhook-server-7df86c4f6c-c2x4m\" (UID: \"65b275de-548e-4eea-bb10-7f32abf4f838\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-c2x4m" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.959151 4755 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/e86bcc6c-4300-4fcc-8333-902799e386ad-frr-startup\") pod \"frr-k8s-g4qwg\" (UID: \"e86bcc6c-4300-4fcc-8333-902799e386ad\") " pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.959833 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/e86bcc6c-4300-4fcc-8333-902799e386ad-reloader\") pod \"frr-k8s-g4qwg\" (UID: \"e86bcc6c-4300-4fcc-8333-902799e386ad\") " pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.960019 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/e86bcc6c-4300-4fcc-8333-902799e386ad-frr-sockets\") pod \"frr-k8s-g4qwg\" (UID: \"e86bcc6c-4300-4fcc-8333-902799e386ad\") " pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.960187 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/e86bcc6c-4300-4fcc-8333-902799e386ad-metrics\") pod \"frr-k8s-g4qwg\" (UID: \"e86bcc6c-4300-4fcc-8333-902799e386ad\") " pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.960434 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/e86bcc6c-4300-4fcc-8333-902799e386ad-frr-conf\") pod \"frr-k8s-g4qwg\" (UID: \"e86bcc6c-4300-4fcc-8333-902799e386ad\") " pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:01 crc kubenswrapper[4755]: I0202 22:49:01.984232 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tkxk\" (UniqueName: \"kubernetes.io/projected/e86bcc6c-4300-4fcc-8333-902799e386ad-kube-api-access-4tkxk\") pod \"frr-k8s-g4qwg\" (UID: \"e86bcc6c-4300-4fcc-8333-902799e386ad\") " pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:02 crc kubenswrapper[4755]: I0202 22:49:02.060410 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cb411fd0-384e-434e-82c7-6d42381d016e-metrics-certs\") pod \"controller-6968d8fdc4-v6qqk\" (UID: \"cb411fd0-384e-434e-82c7-6d42381d016e\") " pod="metallb-system/controller-6968d8fdc4-v6qqk" Feb 02 22:49:02 crc kubenswrapper[4755]: I0202 22:49:02.060462 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8w5b\" (UniqueName: \"kubernetes.io/projected/fa5b9de9-7fe3-4aed-9187-c9660d3f5e38-kube-api-access-j8w5b\") pod \"speaker-9h74b\" (UID: \"fa5b9de9-7fe3-4aed-9187-c9660d3f5e38\") " pod="metallb-system/speaker-9h74b" Feb 02 22:49:02 crc kubenswrapper[4755]: I0202 22:49:02.060495 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cb411fd0-384e-434e-82c7-6d42381d016e-cert\") pod \"controller-6968d8fdc4-v6qqk\" (UID: \"cb411fd0-384e-434e-82c7-6d42381d016e\") " pod="metallb-system/controller-6968d8fdc4-v6qqk" Feb 02 22:49:02 crc kubenswrapper[4755]: I0202 22:49:02.060531 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jmql\" (UniqueName: \"kubernetes.io/projected/65b275de-548e-4eea-bb10-7f32abf4f838-kube-api-access-2jmql\") pod \"frr-k8s-webhook-server-7df86c4f6c-c2x4m\" (UID: \"65b275de-548e-4eea-bb10-7f32abf4f838\") " 
pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-c2x4m" Feb 02 22:49:02 crc kubenswrapper[4755]: I0202 22:49:02.060565 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/65b275de-548e-4eea-bb10-7f32abf4f838-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-c2x4m\" (UID: \"65b275de-548e-4eea-bb10-7f32abf4f838\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-c2x4m" Feb 02 22:49:02 crc kubenswrapper[4755]: I0202 22:49:02.060589 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/fa5b9de9-7fe3-4aed-9187-c9660d3f5e38-memberlist\") pod \"speaker-9h74b\" (UID: \"fa5b9de9-7fe3-4aed-9187-c9660d3f5e38\") " pod="metallb-system/speaker-9h74b" Feb 02 22:49:02 crc kubenswrapper[4755]: I0202 22:49:02.060608 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/fa5b9de9-7fe3-4aed-9187-c9660d3f5e38-metallb-excludel2\") pod \"speaker-9h74b\" (UID: \"fa5b9de9-7fe3-4aed-9187-c9660d3f5e38\") " pod="metallb-system/speaker-9h74b" Feb 02 22:49:02 crc kubenswrapper[4755]: I0202 22:49:02.060635 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b74qf\" (UniqueName: \"kubernetes.io/projected/cb411fd0-384e-434e-82c7-6d42381d016e-kube-api-access-b74qf\") pod \"controller-6968d8fdc4-v6qqk\" (UID: \"cb411fd0-384e-434e-82c7-6d42381d016e\") " pod="metallb-system/controller-6968d8fdc4-v6qqk" Feb 02 22:49:02 crc kubenswrapper[4755]: I0202 22:49:02.060695 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fa5b9de9-7fe3-4aed-9187-c9660d3f5e38-metrics-certs\") pod \"speaker-9h74b\" (UID: \"fa5b9de9-7fe3-4aed-9187-c9660d3f5e38\") " pod="metallb-system/speaker-9h74b" Feb 02 22:49:02 crc kubenswrapper[4755]: E0202 22:49:02.060838 4755 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Feb 02 22:49:02 crc kubenswrapper[4755]: E0202 22:49:02.060883 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fa5b9de9-7fe3-4aed-9187-c9660d3f5e38-metrics-certs podName:fa5b9de9-7fe3-4aed-9187-c9660d3f5e38 nodeName:}" failed. No retries permitted until 2026-02-02 22:49:02.560867759 +0000 UTC m=+898.252088085 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fa5b9de9-7fe3-4aed-9187-c9660d3f5e38-metrics-certs") pod "speaker-9h74b" (UID: "fa5b9de9-7fe3-4aed-9187-c9660d3f5e38") : secret "speaker-certs-secret" not found Feb 02 22:49:02 crc kubenswrapper[4755]: E0202 22:49:02.061078 4755 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Feb 02 22:49:02 crc kubenswrapper[4755]: E0202 22:49:02.061108 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fa5b9de9-7fe3-4aed-9187-c9660d3f5e38-memberlist podName:fa5b9de9-7fe3-4aed-9187-c9660d3f5e38 nodeName:}" failed. No retries permitted until 2026-02-02 22:49:02.561098075 +0000 UTC m=+898.252318401 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/fa5b9de9-7fe3-4aed-9187-c9660d3f5e38-memberlist") pod "speaker-9h74b" (UID: "fa5b9de9-7fe3-4aed-9187-c9660d3f5e38") : secret "metallb-memberlist" not found Feb 02 22:49:02 crc kubenswrapper[4755]: I0202 22:49:02.061656 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/fa5b9de9-7fe3-4aed-9187-c9660d3f5e38-metallb-excludel2\") pod \"speaker-9h74b\" (UID: \"fa5b9de9-7fe3-4aed-9187-c9660d3f5e38\") " pod="metallb-system/speaker-9h74b" Feb 02 22:49:02 crc kubenswrapper[4755]: I0202 22:49:02.063578 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/65b275de-548e-4eea-bb10-7f32abf4f838-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-c2x4m\" (UID: \"65b275de-548e-4eea-bb10-7f32abf4f838\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-c2x4m" Feb 02 22:49:02 crc kubenswrapper[4755]: I0202 22:49:02.063861 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cb411fd0-384e-434e-82c7-6d42381d016e-metrics-certs\") pod \"controller-6968d8fdc4-v6qqk\" (UID: \"cb411fd0-384e-434e-82c7-6d42381d016e\") " pod="metallb-system/controller-6968d8fdc4-v6qqk" Feb 02 22:49:02 crc kubenswrapper[4755]: I0202 22:49:02.064142 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cb411fd0-384e-434e-82c7-6d42381d016e-cert\") pod \"controller-6968d8fdc4-v6qqk\" (UID: \"cb411fd0-384e-434e-82c7-6d42381d016e\") " pod="metallb-system/controller-6968d8fdc4-v6qqk" Feb 02 22:49:02 crc kubenswrapper[4755]: I0202 22:49:02.076128 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b74qf\" (UniqueName: \"kubernetes.io/projected/cb411fd0-384e-434e-82c7-6d42381d016e-kube-api-access-b74qf\") pod \"controller-6968d8fdc4-v6qqk\" (UID: \"cb411fd0-384e-434e-82c7-6d42381d016e\") " pod="metallb-system/controller-6968d8fdc4-v6qqk" Feb 02 22:49:02 crc kubenswrapper[4755]: I0202 22:49:02.079071 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jmql\" (UniqueName: \"kubernetes.io/projected/65b275de-548e-4eea-bb10-7f32abf4f838-kube-api-access-2jmql\") pod \"frr-k8s-webhook-server-7df86c4f6c-c2x4m\" (UID: \"65b275de-548e-4eea-bb10-7f32abf4f838\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-c2x4m" Feb 02 22:49:02 crc kubenswrapper[4755]: I0202 22:49:02.082103 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8w5b\" (UniqueName: \"kubernetes.io/projected/fa5b9de9-7fe3-4aed-9187-c9660d3f5e38-kube-api-access-j8w5b\") pod \"speaker-9h74b\" (UID: \"fa5b9de9-7fe3-4aed-9187-c9660d3f5e38\") " pod="metallb-system/speaker-9h74b" Feb 02 22:49:02 crc kubenswrapper[4755]: I0202 22:49:02.220258 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-6968d8fdc4-v6qqk" Feb 02 22:49:02 crc kubenswrapper[4755]: I0202 22:49:02.465561 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-v6qqk"] Feb 02 22:49:02 crc kubenswrapper[4755]: I0202 22:49:02.567865 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/fa5b9de9-7fe3-4aed-9187-c9660d3f5e38-memberlist\") pod \"speaker-9h74b\" (UID: \"fa5b9de9-7fe3-4aed-9187-c9660d3f5e38\") " pod="metallb-system/speaker-9h74b" Feb 02 22:49:02 crc kubenswrapper[4755]: E0202 22:49:02.567993 4755 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Feb 02 22:49:02 crc kubenswrapper[4755]: E0202 22:49:02.568059 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fa5b9de9-7fe3-4aed-9187-c9660d3f5e38-memberlist podName:fa5b9de9-7fe3-4aed-9187-c9660d3f5e38 nodeName:}" failed. No retries permitted until 2026-02-02 22:49:03.56804294 +0000 UTC m=+899.259263266 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/fa5b9de9-7fe3-4aed-9187-c9660d3f5e38-memberlist") pod "speaker-9h74b" (UID: "fa5b9de9-7fe3-4aed-9187-c9660d3f5e38") : secret "metallb-memberlist" not found Feb 02 22:49:02 crc kubenswrapper[4755]: I0202 22:49:02.568078 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fa5b9de9-7fe3-4aed-9187-c9660d3f5e38-metrics-certs\") pod \"speaker-9h74b\" (UID: \"fa5b9de9-7fe3-4aed-9187-c9660d3f5e38\") " pod="metallb-system/speaker-9h74b" Feb 02 22:49:02 crc kubenswrapper[4755]: I0202 22:49:02.574386 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fa5b9de9-7fe3-4aed-9187-c9660d3f5e38-metrics-certs\") pod \"speaker-9h74b\" (UID: \"fa5b9de9-7fe3-4aed-9187-c9660d3f5e38\") " pod="metallb-system/speaker-9h74b" Feb 02 22:49:02 crc kubenswrapper[4755]: I0202 22:49:02.791648 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-jm8zf" Feb 02 22:49:02 crc kubenswrapper[4755]: I0202 22:49:02.799718 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-c2x4m" Feb 02 22:49:02 crc kubenswrapper[4755]: E0202 22:49:02.959876 4755 configmap.go:193] Couldn't get configMap metallb-system/frr-startup: failed to sync configmap cache: timed out waiting for the condition Feb 02 22:49:02 crc kubenswrapper[4755]: E0202 22:49:02.959999 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e86bcc6c-4300-4fcc-8333-902799e386ad-frr-startup podName:e86bcc6c-4300-4fcc-8333-902799e386ad nodeName:}" failed. No retries permitted until 2026-02-02 22:49:03.459971145 +0000 UTC m=+899.151191481 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "frr-startup" (UniqueName: "kubernetes.io/configmap/e86bcc6c-4300-4fcc-8333-902799e386ad-frr-startup") pod "frr-k8s-g4qwg" (UID: "e86bcc6c-4300-4fcc-8333-902799e386ad") : failed to sync configmap cache: timed out waiting for the condition Feb 02 22:49:02 crc kubenswrapper[4755]: E0202 22:49:02.961064 4755 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: failed to sync secret cache: timed out waiting for the condition Feb 02 22:49:02 crc kubenswrapper[4755]: E0202 22:49:02.961155 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e86bcc6c-4300-4fcc-8333-902799e386ad-metrics-certs podName:e86bcc6c-4300-4fcc-8333-902799e386ad nodeName:}" failed. No retries permitted until 2026-02-02 22:49:03.461129617 +0000 UTC m=+899.152350003 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e86bcc6c-4300-4fcc-8333-902799e386ad-metrics-certs") pod "frr-k8s-g4qwg" (UID: "e86bcc6c-4300-4fcc-8333-902799e386ad") : failed to sync secret cache: timed out waiting for the condition Feb 02 22:49:03 crc kubenswrapper[4755]: I0202 22:49:03.086204 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Feb 02 22:49:03 crc kubenswrapper[4755]: I0202 22:49:03.199939 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-v6qqk" event={"ID":"cb411fd0-384e-434e-82c7-6d42381d016e","Type":"ContainerStarted","Data":"49c30aedca0f59bbd675c1b0897e3430222a3c804fc336ff8a3cb52842e3ec6f"} Feb 02 22:49:03 crc kubenswrapper[4755]: I0202 22:49:03.200004 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-v6qqk" event={"ID":"cb411fd0-384e-434e-82c7-6d42381d016e","Type":"ContainerStarted","Data":"9b725f538fc35fee88f01a852883814e20bc6a5af6917afdf4435286feabcd74"} Feb 02 22:49:03 crc kubenswrapper[4755]: I0202 22:49:03.200026 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-v6qqk" event={"ID":"cb411fd0-384e-434e-82c7-6d42381d016e","Type":"ContainerStarted","Data":"d1af0516062d6c26056704142a1b13f93d54aee19b516b701d15c1d467134566"} Feb 02 22:49:03 crc kubenswrapper[4755]: I0202 22:49:03.200118 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6968d8fdc4-v6qqk" Feb 02 22:49:03 crc kubenswrapper[4755]: I0202 22:49:03.220481 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6968d8fdc4-v6qqk" podStartSLOduration=2.220444343 podStartE2EDuration="2.220444343s" podCreationTimestamp="2026-02-02 22:49:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:49:03.219578499 +0000 UTC m=+898.910798885" watchObservedRunningTime="2026-02-02 22:49:03.220444343 +0000 UTC m=+898.911664709" Feb 02 22:49:03 crc kubenswrapper[4755]: I0202 22:49:03.323806 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-c2x4m"] Feb 02 22:49:03 crc kubenswrapper[4755]: I0202 22:49:03.347397 4755 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Feb 02 22:49:03 crc kubenswrapper[4755]: I0202 22:49:03.481260 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/e86bcc6c-4300-4fcc-8333-902799e386ad-metrics-certs\") pod \"frr-k8s-g4qwg\" (UID: \"e86bcc6c-4300-4fcc-8333-902799e386ad\") " pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:03 crc kubenswrapper[4755]: I0202 22:49:03.481331 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/e86bcc6c-4300-4fcc-8333-902799e386ad-frr-startup\") pod \"frr-k8s-g4qwg\" (UID: \"e86bcc6c-4300-4fcc-8333-902799e386ad\") " pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:03 crc kubenswrapper[4755]: I0202 22:49:03.482456 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/e86bcc6c-4300-4fcc-8333-902799e386ad-frr-startup\") pod \"frr-k8s-g4qwg\" (UID: \"e86bcc6c-4300-4fcc-8333-902799e386ad\") " pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:03 crc kubenswrapper[4755]: I0202 22:49:03.488480 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e86bcc6c-4300-4fcc-8333-902799e386ad-metrics-certs\") pod \"frr-k8s-g4qwg\" (UID: \"e86bcc6c-4300-4fcc-8333-902799e386ad\") " pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:03 crc kubenswrapper[4755]: I0202 22:49:03.570380 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:03 crc kubenswrapper[4755]: I0202 22:49:03.583529 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/fa5b9de9-7fe3-4aed-9187-c9660d3f5e38-memberlist\") pod \"speaker-9h74b\" (UID: \"fa5b9de9-7fe3-4aed-9187-c9660d3f5e38\") " pod="metallb-system/speaker-9h74b" Feb 02 22:49:03 crc kubenswrapper[4755]: I0202 22:49:03.588774 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/fa5b9de9-7fe3-4aed-9187-c9660d3f5e38-memberlist\") pod \"speaker-9h74b\" (UID: \"fa5b9de9-7fe3-4aed-9187-c9660d3f5e38\") " pod="metallb-system/speaker-9h74b" Feb 02 22:49:03 crc kubenswrapper[4755]: I0202 22:49:03.642595 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-9h74b" Feb 02 22:49:03 crc kubenswrapper[4755]: W0202 22:49:03.696035 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfa5b9de9_7fe3_4aed_9187_c9660d3f5e38.slice/crio-9645f520218bd3775e86ee7659c3d91361d45e04828047004bec98b8d69b8463 WatchSource:0}: Error finding container 9645f520218bd3775e86ee7659c3d91361d45e04828047004bec98b8d69b8463: Status 404 returned error can't find the container with id 9645f520218bd3775e86ee7659c3d91361d45e04828047004bec98b8d69b8463 Feb 02 22:49:04 crc kubenswrapper[4755]: I0202 22:49:04.220456 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-g4qwg" event={"ID":"e86bcc6c-4300-4fcc-8333-902799e386ad","Type":"ContainerStarted","Data":"6494ef4bf0d1ad051870c66e4c6560723c203390d9d63ed2e681839716c9f815"} Feb 02 22:49:04 crc kubenswrapper[4755]: I0202 22:49:04.223239 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-c2x4m" event={"ID":"65b275de-548e-4eea-bb10-7f32abf4f838","Type":"ContainerStarted","Data":"c0e36b61b1c996ce98069f26dfe0a91dbd3de22bdb83e2cf782e0f4fd08a786f"} Feb 02 22:49:04 crc kubenswrapper[4755]: I0202 22:49:04.229865 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-9h74b" event={"ID":"fa5b9de9-7fe3-4aed-9187-c9660d3f5e38","Type":"ContainerStarted","Data":"6e349877e3eac56d22246b931ec9787a4b39f52b94f97eda8ae2b119c97a753d"} Feb 02 22:49:04 crc kubenswrapper[4755]: I0202 22:49:04.229915 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-9h74b" event={"ID":"fa5b9de9-7fe3-4aed-9187-c9660d3f5e38","Type":"ContainerStarted","Data":"9645f520218bd3775e86ee7659c3d91361d45e04828047004bec98b8d69b8463"} Feb 02 22:49:05 crc kubenswrapper[4755]: I0202 22:49:05.256274 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-9h74b" event={"ID":"fa5b9de9-7fe3-4aed-9187-c9660d3f5e38","Type":"ContainerStarted","Data":"12a0b0edfe42a9ea3b32ec11622e6d4733373236d95473a13c54ba5907907024"} Feb 02 22:49:05 crc kubenswrapper[4755]: I0202 22:49:05.257038 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-9h74b" Feb 02 22:49:05 crc kubenswrapper[4755]: I0202 22:49:05.285059 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-9h74b" podStartSLOduration=4.284994775 podStartE2EDuration="4.284994775s" podCreationTimestamp="2026-02-02 22:49:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:49:05.274249587 +0000 UTC m=+900.965469913" watchObservedRunningTime="2026-02-02 22:49:05.284994775 +0000 UTC m=+900.976215101" Feb 02 22:49:11 crc kubenswrapper[4755]: I0202 22:49:11.301333 4755 generic.go:334] "Generic (PLEG): container finished" podID="e86bcc6c-4300-4fcc-8333-902799e386ad" containerID="ab4744bf821e9e23d14b8074046c49f54f6cbe60b03d111ea78f8f4132385f77" exitCode=0 Feb 02 22:49:11 crc kubenswrapper[4755]: I0202 22:49:11.301466 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-g4qwg" event={"ID":"e86bcc6c-4300-4fcc-8333-902799e386ad","Type":"ContainerDied","Data":"ab4744bf821e9e23d14b8074046c49f54f6cbe60b03d111ea78f8f4132385f77"} Feb 02 22:49:11 crc kubenswrapper[4755]: I0202 22:49:11.304602 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-c2x4m" event={"ID":"65b275de-548e-4eea-bb10-7f32abf4f838","Type":"ContainerStarted","Data":"30731ace666eaee7ff088cea817fce446735654f261d336901af3a82b7f3ae8c"} Feb 02 22:49:11 crc kubenswrapper[4755]: I0202 22:49:11.304795 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-c2x4m" Feb 02 22:49:11 crc kubenswrapper[4755]: I0202 22:49:11.381402 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-c2x4m" podStartSLOduration=3.039501521 podStartE2EDuration="10.381373308s" podCreationTimestamp="2026-02-02 22:49:01 +0000 UTC" firstStartedPulling="2026-02-02 22:49:03.328338065 +0000 UTC m=+899.019558431" lastFinishedPulling="2026-02-02 22:49:10.670209882 +0000 UTC m=+906.361430218" observedRunningTime="2026-02-02 22:49:11.377964473 +0000 UTC m=+907.069184869" watchObservedRunningTime="2026-02-02 22:49:11.381373308 +0000 UTC m=+907.072593674" Feb 02 22:49:12 crc kubenswrapper[4755]: I0202 22:49:12.226674 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6968d8fdc4-v6qqk" Feb 02 22:49:12 crc kubenswrapper[4755]: I0202 22:49:12.315696 4755 generic.go:334] "Generic (PLEG): container finished" podID="e86bcc6c-4300-4fcc-8333-902799e386ad" containerID="c83539714b6d0f80b9225a5b1c36853aec55f24ac890e069150336da858eaddf" exitCode=0 Feb 02 22:49:12 crc kubenswrapper[4755]: I0202 22:49:12.315774 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-g4qwg" event={"ID":"e86bcc6c-4300-4fcc-8333-902799e386ad","Type":"ContainerDied","Data":"c83539714b6d0f80b9225a5b1c36853aec55f24ac890e069150336da858eaddf"} Feb 02 22:49:13 crc kubenswrapper[4755]: I0202 22:49:13.327699 4755 generic.go:334] "Generic (PLEG): container finished" podID="e86bcc6c-4300-4fcc-8333-902799e386ad" containerID="75f67cd6981abd0519c197f282956b5cb104dcdb52ffda71660d4f22fe12fb2d" exitCode=0 Feb 02 22:49:13 crc kubenswrapper[4755]: I0202 22:49:13.327788 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-g4qwg" event={"ID":"e86bcc6c-4300-4fcc-8333-902799e386ad","Type":"ContainerDied","Data":"75f67cd6981abd0519c197f282956b5cb104dcdb52ffda71660d4f22fe12fb2d"} Feb 02 22:49:13 crc kubenswrapper[4755]: I0202 22:49:13.646268 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-9h74b" Feb 02 22:49:14 crc kubenswrapper[4755]: I0202 22:49:14.338748 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-g4qwg" event={"ID":"e86bcc6c-4300-4fcc-8333-902799e386ad","Type":"ContainerStarted","Data":"60c0b68aee4d921a398d91f81373abced848132e93a37a880ee3eb151063ee01"} Feb 02 22:49:14 crc kubenswrapper[4755]: I0202 22:49:14.339483 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-g4qwg" event={"ID":"e86bcc6c-4300-4fcc-8333-902799e386ad","Type":"ContainerStarted","Data":"814c9e7a1a178b96f9d827d1890ff8feca08032db6dc6f07eb124daf23ececb7"} Feb 02 22:49:14 crc kubenswrapper[4755]: I0202 22:49:14.339575 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-g4qwg" event={"ID":"e86bcc6c-4300-4fcc-8333-902799e386ad","Type":"ContainerStarted","Data":"e6a13168300dd10bc15161fd4dee3ff6c65dae5a40b25d45b2979f98c898f0be"} Feb 02 22:49:14 crc kubenswrapper[4755]: I0202 22:49:14.339665 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/frr-k8s-g4qwg" event={"ID":"e86bcc6c-4300-4fcc-8333-902799e386ad","Type":"ContainerStarted","Data":"75ad2d24d6a93266b868898fb20bffa5fb7e982fbf4b34bbb78277e39b2e341c"} Feb 02 22:49:15 crc kubenswrapper[4755]: I0202 22:49:15.352376 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-g4qwg" event={"ID":"e86bcc6c-4300-4fcc-8333-902799e386ad","Type":"ContainerStarted","Data":"237826cd132d73ebe084a7e86de938c7c752ab62050b1974d6c3f290c6917bd3"} Feb 02 22:49:15 crc kubenswrapper[4755]: I0202 22:49:15.352848 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-g4qwg" event={"ID":"e86bcc6c-4300-4fcc-8333-902799e386ad","Type":"ContainerStarted","Data":"7fab961473eba48306d9ead83cf16733de982ad0f72f0370d2da88cd773d9dfd"} Feb 02 22:49:15 crc kubenswrapper[4755]: I0202 22:49:15.352887 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:15 crc kubenswrapper[4755]: I0202 22:49:15.384550 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-g4qwg" podStartSLOduration=7.497157539 podStartE2EDuration="14.38453323s" podCreationTimestamp="2026-02-02 22:49:01 +0000 UTC" firstStartedPulling="2026-02-02 22:49:03.756297582 +0000 UTC m=+899.447517908" lastFinishedPulling="2026-02-02 22:49:10.643673233 +0000 UTC m=+906.334893599" observedRunningTime="2026-02-02 22:49:15.380510158 +0000 UTC m=+911.071730494" watchObservedRunningTime="2026-02-02 22:49:15.38453323 +0000 UTC m=+911.075753556" Feb 02 22:49:16 crc kubenswrapper[4755]: I0202 22:49:16.633761 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-6nfrk"] Feb 02 22:49:16 crc kubenswrapper[4755]: I0202 22:49:16.637195 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-6nfrk" Feb 02 22:49:16 crc kubenswrapper[4755]: I0202 22:49:16.645314 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Feb 02 22:49:16 crc kubenswrapper[4755]: I0202 22:49:16.645518 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Feb 02 22:49:16 crc kubenswrapper[4755]: I0202 22:49:16.645669 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-gqtjp" Feb 02 22:49:16 crc kubenswrapper[4755]: I0202 22:49:16.654756 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-6nfrk"] Feb 02 22:49:16 crc kubenswrapper[4755]: I0202 22:49:16.778784 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2lwq\" (UniqueName: \"kubernetes.io/projected/8dde2366-fdae-4ff2-adde-7f51d32af165-kube-api-access-q2lwq\") pod \"openstack-operator-index-6nfrk\" (UID: \"8dde2366-fdae-4ff2-adde-7f51d32af165\") " pod="openstack-operators/openstack-operator-index-6nfrk" Feb 02 22:49:16 crc kubenswrapper[4755]: I0202 22:49:16.880010 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2lwq\" (UniqueName: \"kubernetes.io/projected/8dde2366-fdae-4ff2-adde-7f51d32af165-kube-api-access-q2lwq\") pod \"openstack-operator-index-6nfrk\" (UID: \"8dde2366-fdae-4ff2-adde-7f51d32af165\") " pod="openstack-operators/openstack-operator-index-6nfrk" Feb 02 22:49:16 crc kubenswrapper[4755]: I0202 22:49:16.905705 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2lwq\" (UniqueName: \"kubernetes.io/projected/8dde2366-fdae-4ff2-adde-7f51d32af165-kube-api-access-q2lwq\") pod \"openstack-operator-index-6nfrk\" (UID: \"8dde2366-fdae-4ff2-adde-7f51d32af165\") " pod="openstack-operators/openstack-operator-index-6nfrk" Feb 02 22:49:16 crc kubenswrapper[4755]: I0202 22:49:16.969951 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-6nfrk" Feb 02 22:49:17 crc kubenswrapper[4755]: W0202 22:49:17.448998 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8dde2366_fdae_4ff2_adde_7f51d32af165.slice/crio-b6e05266826124d1aa23ffb7372610561273a31157cd0325de1be3c7f64c73ba WatchSource:0}: Error finding container b6e05266826124d1aa23ffb7372610561273a31157cd0325de1be3c7f64c73ba: Status 404 returned error can't find the container with id b6e05266826124d1aa23ffb7372610561273a31157cd0325de1be3c7f64c73ba Feb 02 22:49:17 crc kubenswrapper[4755]: I0202 22:49:17.460512 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-6nfrk"] Feb 02 22:49:18 crc kubenswrapper[4755]: I0202 22:49:18.378180 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6nfrk" event={"ID":"8dde2366-fdae-4ff2-adde-7f51d32af165","Type":"ContainerStarted","Data":"b6e05266826124d1aa23ffb7372610561273a31157cd0325de1be3c7f64c73ba"} Feb 02 22:49:18 crc kubenswrapper[4755]: I0202 22:49:18.571496 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:18 crc kubenswrapper[4755]: I0202 22:49:18.606636 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:20 crc kubenswrapper[4755]: I0202 22:49:20.399981 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6nfrk" event={"ID":"8dde2366-fdae-4ff2-adde-7f51d32af165","Type":"ContainerStarted","Data":"aa96d58ea3ef8cbd9eb8f3fb058903b8fd5142f0d9c332218ad1dbc9d1f6bb64"} Feb 02 22:49:20 crc kubenswrapper[4755]: I0202 22:49:20.423913 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-6nfrk" podStartSLOduration=2.165552048 podStartE2EDuration="4.423882853s" podCreationTimestamp="2026-02-02 22:49:16 +0000 UTC" firstStartedPulling="2026-02-02 22:49:17.461619562 +0000 UTC m=+913.152839888" lastFinishedPulling="2026-02-02 22:49:19.719950347 +0000 UTC m=+915.411170693" observedRunningTime="2026-02-02 22:49:20.420849668 +0000 UTC m=+916.112070054" watchObservedRunningTime="2026-02-02 22:49:20.423882853 +0000 UTC m=+916.115103219" Feb 02 22:49:21 crc kubenswrapper[4755]: I0202 22:49:21.786898 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-6nfrk"] Feb 02 22:49:22 crc kubenswrapper[4755]: I0202 22:49:22.387214 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-njmg8"] Feb 02 22:49:22 crc kubenswrapper[4755]: I0202 22:49:22.388380 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-njmg8" Feb 02 22:49:22 crc kubenswrapper[4755]: I0202 22:49:22.403514 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-njmg8"] Feb 02 22:49:22 crc kubenswrapper[4755]: I0202 22:49:22.415495 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-6nfrk" podUID="8dde2366-fdae-4ff2-adde-7f51d32af165" containerName="registry-server" containerID="cri-o://aa96d58ea3ef8cbd9eb8f3fb058903b8fd5142f0d9c332218ad1dbc9d1f6bb64" gracePeriod=2 Feb 02 22:49:22 crc kubenswrapper[4755]: I0202 22:49:22.459691 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkhd2\" (UniqueName: \"kubernetes.io/projected/fff245fb-8d48-4499-8963-1efcf0705321-kube-api-access-qkhd2\") pod \"openstack-operator-index-njmg8\" (UID: \"fff245fb-8d48-4499-8963-1efcf0705321\") " pod="openstack-operators/openstack-operator-index-njmg8" Feb 02 22:49:22 crc kubenswrapper[4755]: I0202 22:49:22.561268 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkhd2\" (UniqueName: \"kubernetes.io/projected/fff245fb-8d48-4499-8963-1efcf0705321-kube-api-access-qkhd2\") pod \"openstack-operator-index-njmg8\" (UID: \"fff245fb-8d48-4499-8963-1efcf0705321\") " pod="openstack-operators/openstack-operator-index-njmg8" Feb 02 22:49:22 crc kubenswrapper[4755]: I0202 22:49:22.590211 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkhd2\" (UniqueName: \"kubernetes.io/projected/fff245fb-8d48-4499-8963-1efcf0705321-kube-api-access-qkhd2\") pod \"openstack-operator-index-njmg8\" (UID: \"fff245fb-8d48-4499-8963-1efcf0705321\") " pod="openstack-operators/openstack-operator-index-njmg8" Feb 02 22:49:22 crc kubenswrapper[4755]: I0202 22:49:22.711104 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-njmg8" Feb 02 22:49:22 crc kubenswrapper[4755]: I0202 22:49:22.815151 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-c2x4m" Feb 02 22:49:22 crc kubenswrapper[4755]: I0202 22:49:22.839142 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-6nfrk" Feb 02 22:49:22 crc kubenswrapper[4755]: I0202 22:49:22.864691 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2lwq\" (UniqueName: \"kubernetes.io/projected/8dde2366-fdae-4ff2-adde-7f51d32af165-kube-api-access-q2lwq\") pod \"8dde2366-fdae-4ff2-adde-7f51d32af165\" (UID: \"8dde2366-fdae-4ff2-adde-7f51d32af165\") " Feb 02 22:49:22 crc kubenswrapper[4755]: I0202 22:49:22.871483 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8dde2366-fdae-4ff2-adde-7f51d32af165-kube-api-access-q2lwq" (OuterVolumeSpecName: "kube-api-access-q2lwq") pod "8dde2366-fdae-4ff2-adde-7f51d32af165" (UID: "8dde2366-fdae-4ff2-adde-7f51d32af165"). InnerVolumeSpecName "kube-api-access-q2lwq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:49:22 crc kubenswrapper[4755]: I0202 22:49:22.966602 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2lwq\" (UniqueName: \"kubernetes.io/projected/8dde2366-fdae-4ff2-adde-7f51d32af165-kube-api-access-q2lwq\") on node \"crc\" DevicePath \"\"" Feb 02 22:49:23 crc kubenswrapper[4755]: I0202 22:49:23.149217 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-njmg8"] Feb 02 22:49:23 crc kubenswrapper[4755]: W0202 22:49:23.159382 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfff245fb_8d48_4499_8963_1efcf0705321.slice/crio-f85f3db1f32dbd85e4a182f7be406b9f21a4b4c72c6dd32f3d976d80573af729 WatchSource:0}: Error finding container f85f3db1f32dbd85e4a182f7be406b9f21a4b4c72c6dd32f3d976d80573af729: Status 404 returned error can't find the container with id f85f3db1f32dbd85e4a182f7be406b9f21a4b4c72c6dd32f3d976d80573af729 Feb 02 22:49:23 crc kubenswrapper[4755]: I0202 22:49:23.390597 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 22:49:23 crc kubenswrapper[4755]: I0202 22:49:23.391027 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 22:49:23 crc kubenswrapper[4755]: I0202 22:49:23.426511 4755 generic.go:334] "Generic (PLEG): container finished" podID="8dde2366-fdae-4ff2-adde-7f51d32af165" containerID="aa96d58ea3ef8cbd9eb8f3fb058903b8fd5142f0d9c332218ad1dbc9d1f6bb64" exitCode=0 Feb 02 22:49:23 crc kubenswrapper[4755]: I0202 22:49:23.426562 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6nfrk" event={"ID":"8dde2366-fdae-4ff2-adde-7f51d32af165","Type":"ContainerDied","Data":"aa96d58ea3ef8cbd9eb8f3fb058903b8fd5142f0d9c332218ad1dbc9d1f6bb64"} Feb 02 22:49:23 crc kubenswrapper[4755]: I0202 22:49:23.426622 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6nfrk" event={"ID":"8dde2366-fdae-4ff2-adde-7f51d32af165","Type":"ContainerDied","Data":"b6e05266826124d1aa23ffb7372610561273a31157cd0325de1be3c7f64c73ba"} Feb 02 22:49:23 crc kubenswrapper[4755]: I0202 22:49:23.426629 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-6nfrk" Feb 02 22:49:23 crc kubenswrapper[4755]: I0202 22:49:23.426691 4755 scope.go:117] "RemoveContainer" containerID="aa96d58ea3ef8cbd9eb8f3fb058903b8fd5142f0d9c332218ad1dbc9d1f6bb64" Feb 02 22:49:23 crc kubenswrapper[4755]: I0202 22:49:23.428557 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-njmg8" event={"ID":"fff245fb-8d48-4499-8963-1efcf0705321","Type":"ContainerStarted","Data":"0b25f4d99dcdd19d2d8c1aa89cc1aea95c7007321418d5ba07d5eb973def75a0"} Feb 02 22:49:23 crc kubenswrapper[4755]: I0202 22:49:23.428593 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-njmg8" event={"ID":"fff245fb-8d48-4499-8963-1efcf0705321","Type":"ContainerStarted","Data":"f85f3db1f32dbd85e4a182f7be406b9f21a4b4c72c6dd32f3d976d80573af729"} Feb 02 22:49:23 crc kubenswrapper[4755]: I0202 22:49:23.451074 4755 scope.go:117] "RemoveContainer" containerID="aa96d58ea3ef8cbd9eb8f3fb058903b8fd5142f0d9c332218ad1dbc9d1f6bb64" Feb 02 22:49:23 crc kubenswrapper[4755]: I0202 22:49:23.452564 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-njmg8" podStartSLOduration=1.397443008 podStartE2EDuration="1.452538961s" podCreationTimestamp="2026-02-02 22:49:22 +0000 UTC" firstStartedPulling="2026-02-02 22:49:23.163839658 +0000 UTC m=+918.855060024" lastFinishedPulling="2026-02-02 22:49:23.218935621 +0000 UTC m=+918.910155977" observedRunningTime="2026-02-02 22:49:23.447268854 +0000 UTC m=+919.138489190" watchObservedRunningTime="2026-02-02 22:49:23.452538961 +0000 UTC m=+919.143759317" Feb 02 22:49:23 crc kubenswrapper[4755]: E0202 22:49:23.453606 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa96d58ea3ef8cbd9eb8f3fb058903b8fd5142f0d9c332218ad1dbc9d1f6bb64\": container with ID starting with aa96d58ea3ef8cbd9eb8f3fb058903b8fd5142f0d9c332218ad1dbc9d1f6bb64 not found: ID does not exist" containerID="aa96d58ea3ef8cbd9eb8f3fb058903b8fd5142f0d9c332218ad1dbc9d1f6bb64" Feb 02 22:49:23 crc kubenswrapper[4755]: I0202 22:49:23.453649 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa96d58ea3ef8cbd9eb8f3fb058903b8fd5142f0d9c332218ad1dbc9d1f6bb64"} err="failed to get container status \"aa96d58ea3ef8cbd9eb8f3fb058903b8fd5142f0d9c332218ad1dbc9d1f6bb64\": rpc error: code = NotFound desc = could not find container \"aa96d58ea3ef8cbd9eb8f3fb058903b8fd5142f0d9c332218ad1dbc9d1f6bb64\": container with ID starting with aa96d58ea3ef8cbd9eb8f3fb058903b8fd5142f0d9c332218ad1dbc9d1f6bb64 not found: ID does not exist" Feb 02 22:49:23 crc kubenswrapper[4755]: I0202 22:49:23.465570 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-6nfrk"] Feb 02 22:49:23 crc kubenswrapper[4755]: I0202 22:49:23.473352 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-6nfrk"] Feb 02 22:49:23 crc kubenswrapper[4755]: I0202 22:49:23.575208 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-g4qwg" Feb 02 22:49:25 crc kubenswrapper[4755]: I0202 22:49:25.077502 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8dde2366-fdae-4ff2-adde-7f51d32af165" 
path="/var/lib/kubelet/pods/8dde2366-fdae-4ff2-adde-7f51d32af165/volumes" Feb 02 22:49:31 crc kubenswrapper[4755]: I0202 22:49:31.597268 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9wl4f"] Feb 02 22:49:31 crc kubenswrapper[4755]: E0202 22:49:31.598697 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dde2366-fdae-4ff2-adde-7f51d32af165" containerName="registry-server" Feb 02 22:49:31 crc kubenswrapper[4755]: I0202 22:49:31.598725 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dde2366-fdae-4ff2-adde-7f51d32af165" containerName="registry-server" Feb 02 22:49:31 crc kubenswrapper[4755]: I0202 22:49:31.599017 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dde2366-fdae-4ff2-adde-7f51d32af165" containerName="registry-server" Feb 02 22:49:31 crc kubenswrapper[4755]: I0202 22:49:31.600634 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9wl4f" Feb 02 22:49:31 crc kubenswrapper[4755]: I0202 22:49:31.615883 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9wl4f"] Feb 02 22:49:31 crc kubenswrapper[4755]: I0202 22:49:31.687441 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ca1c569-aff0-4334-8317-137544ac5205-catalog-content\") pod \"certified-operators-9wl4f\" (UID: \"0ca1c569-aff0-4334-8317-137544ac5205\") " pod="openshift-marketplace/certified-operators-9wl4f" Feb 02 22:49:31 crc kubenswrapper[4755]: I0202 22:49:31.687514 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cf5h6\" (UniqueName: \"kubernetes.io/projected/0ca1c569-aff0-4334-8317-137544ac5205-kube-api-access-cf5h6\") pod \"certified-operators-9wl4f\" (UID: \"0ca1c569-aff0-4334-8317-137544ac5205\") " pod="openshift-marketplace/certified-operators-9wl4f" Feb 02 22:49:31 crc kubenswrapper[4755]: I0202 22:49:31.687610 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ca1c569-aff0-4334-8317-137544ac5205-utilities\") pod \"certified-operators-9wl4f\" (UID: \"0ca1c569-aff0-4334-8317-137544ac5205\") " pod="openshift-marketplace/certified-operators-9wl4f" Feb 02 22:49:31 crc kubenswrapper[4755]: I0202 22:49:31.789322 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ca1c569-aff0-4334-8317-137544ac5205-catalog-content\") pod \"certified-operators-9wl4f\" (UID: \"0ca1c569-aff0-4334-8317-137544ac5205\") " pod="openshift-marketplace/certified-operators-9wl4f" Feb 02 22:49:31 crc kubenswrapper[4755]: I0202 22:49:31.789445 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cf5h6\" (UniqueName: \"kubernetes.io/projected/0ca1c569-aff0-4334-8317-137544ac5205-kube-api-access-cf5h6\") pod \"certified-operators-9wl4f\" (UID: \"0ca1c569-aff0-4334-8317-137544ac5205\") " pod="openshift-marketplace/certified-operators-9wl4f" Feb 02 22:49:31 crc kubenswrapper[4755]: I0202 22:49:31.789571 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ca1c569-aff0-4334-8317-137544ac5205-utilities\") pod \"certified-operators-9wl4f\" (UID: 
\"0ca1c569-aff0-4334-8317-137544ac5205\") " pod="openshift-marketplace/certified-operators-9wl4f" Feb 02 22:49:31 crc kubenswrapper[4755]: I0202 22:49:31.789945 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ca1c569-aff0-4334-8317-137544ac5205-catalog-content\") pod \"certified-operators-9wl4f\" (UID: \"0ca1c569-aff0-4334-8317-137544ac5205\") " pod="openshift-marketplace/certified-operators-9wl4f" Feb 02 22:49:31 crc kubenswrapper[4755]: I0202 22:49:31.790307 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ca1c569-aff0-4334-8317-137544ac5205-utilities\") pod \"certified-operators-9wl4f\" (UID: \"0ca1c569-aff0-4334-8317-137544ac5205\") " pod="openshift-marketplace/certified-operators-9wl4f" Feb 02 22:49:31 crc kubenswrapper[4755]: I0202 22:49:31.815144 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cf5h6\" (UniqueName: \"kubernetes.io/projected/0ca1c569-aff0-4334-8317-137544ac5205-kube-api-access-cf5h6\") pod \"certified-operators-9wl4f\" (UID: \"0ca1c569-aff0-4334-8317-137544ac5205\") " pod="openshift-marketplace/certified-operators-9wl4f" Feb 02 22:49:31 crc kubenswrapper[4755]: I0202 22:49:31.937530 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9wl4f" Feb 02 22:49:32 crc kubenswrapper[4755]: I0202 22:49:32.416159 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9wl4f"] Feb 02 22:49:32 crc kubenswrapper[4755]: W0202 22:49:32.417966 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0ca1c569_aff0_4334_8317_137544ac5205.slice/crio-c03a51038eb87a82422393703c5e2772065cffc5f0f0427e3c413a58d3689211 WatchSource:0}: Error finding container c03a51038eb87a82422393703c5e2772065cffc5f0f0427e3c413a58d3689211: Status 404 returned error can't find the container with id c03a51038eb87a82422393703c5e2772065cffc5f0f0427e3c413a58d3689211 Feb 02 22:49:32 crc kubenswrapper[4755]: I0202 22:49:32.493291 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9wl4f" event={"ID":"0ca1c569-aff0-4334-8317-137544ac5205","Type":"ContainerStarted","Data":"c03a51038eb87a82422393703c5e2772065cffc5f0f0427e3c413a58d3689211"} Feb 02 22:49:32 crc kubenswrapper[4755]: I0202 22:49:32.711768 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-njmg8" Feb 02 22:49:32 crc kubenswrapper[4755]: I0202 22:49:32.711810 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-njmg8" Feb 02 22:49:32 crc kubenswrapper[4755]: I0202 22:49:32.740396 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-njmg8" Feb 02 22:49:33 crc kubenswrapper[4755]: I0202 22:49:33.502320 4755 generic.go:334] "Generic (PLEG): container finished" podID="0ca1c569-aff0-4334-8317-137544ac5205" containerID="c546458f5c106f3cb59ff583ea3244c43aa7a679dbab7a16b2a7f4443954e5b3" exitCode=0 Feb 02 22:49:33 crc kubenswrapper[4755]: I0202 22:49:33.502412 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9wl4f" 
event={"ID":"0ca1c569-aff0-4334-8317-137544ac5205","Type":"ContainerDied","Data":"c546458f5c106f3cb59ff583ea3244c43aa7a679dbab7a16b2a7f4443954e5b3"} Feb 02 22:49:33 crc kubenswrapper[4755]: I0202 22:49:33.556604 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-njmg8" Feb 02 22:49:35 crc kubenswrapper[4755]: I0202 22:49:35.518000 4755 generic.go:334] "Generic (PLEG): container finished" podID="0ca1c569-aff0-4334-8317-137544ac5205" containerID="619754f5b97ed79cc5369ae62ebb90cd7c3d018a43784bd535b772670d4b34ff" exitCode=0 Feb 02 22:49:35 crc kubenswrapper[4755]: I0202 22:49:35.518062 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9wl4f" event={"ID":"0ca1c569-aff0-4334-8317-137544ac5205","Type":"ContainerDied","Data":"619754f5b97ed79cc5369ae62ebb90cd7c3d018a43784bd535b772670d4b34ff"} Feb 02 22:49:36 crc kubenswrapper[4755]: I0202 22:49:36.530874 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9wl4f" event={"ID":"0ca1c569-aff0-4334-8317-137544ac5205","Type":"ContainerStarted","Data":"88a08d9c9c3d630d55311f1e9a64eb749c50f63939cf4423390e72aa5b4f593d"} Feb 02 22:49:36 crc kubenswrapper[4755]: I0202 22:49:36.550771 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9wl4f" podStartSLOduration=2.923958102 podStartE2EDuration="5.550715358s" podCreationTimestamp="2026-02-02 22:49:31 +0000 UTC" firstStartedPulling="2026-02-02 22:49:33.504589024 +0000 UTC m=+929.195809380" lastFinishedPulling="2026-02-02 22:49:36.1313463 +0000 UTC m=+931.822566636" observedRunningTime="2026-02-02 22:49:36.548837266 +0000 UTC m=+932.240057652" watchObservedRunningTime="2026-02-02 22:49:36.550715358 +0000 UTC m=+932.241935724" Feb 02 22:49:41 crc kubenswrapper[4755]: I0202 22:49:41.645866 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn"] Feb 02 22:49:41 crc kubenswrapper[4755]: I0202 22:49:41.648429 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn" Feb 02 22:49:41 crc kubenswrapper[4755]: I0202 22:49:41.651356 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-grzs5" Feb 02 22:49:41 crc kubenswrapper[4755]: I0202 22:49:41.673970 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn"] Feb 02 22:49:41 crc kubenswrapper[4755]: I0202 22:49:41.677417 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmdw2\" (UniqueName: \"kubernetes.io/projected/137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b-kube-api-access-qmdw2\") pod \"96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn\" (UID: \"137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b\") " pod="openstack-operators/96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn" Feb 02 22:49:41 crc kubenswrapper[4755]: I0202 22:49:41.677515 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b-util\") pod \"96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn\" (UID: \"137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b\") " pod="openstack-operators/96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn" Feb 02 22:49:41 crc kubenswrapper[4755]: I0202 22:49:41.677634 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b-bundle\") pod \"96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn\" (UID: \"137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b\") " pod="openstack-operators/96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn" Feb 02 22:49:41 crc kubenswrapper[4755]: I0202 22:49:41.778370 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmdw2\" (UniqueName: \"kubernetes.io/projected/137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b-kube-api-access-qmdw2\") pod \"96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn\" (UID: \"137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b\") " pod="openstack-operators/96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn" Feb 02 22:49:41 crc kubenswrapper[4755]: I0202 22:49:41.778685 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b-util\") pod \"96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn\" (UID: \"137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b\") " pod="openstack-operators/96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn" Feb 02 22:49:41 crc kubenswrapper[4755]: I0202 22:49:41.778833 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b-bundle\") pod \"96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn\" (UID: \"137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b\") " pod="openstack-operators/96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn" Feb 02 22:49:41 crc kubenswrapper[4755]: I0202 22:49:41.779545 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b-util\") pod \"96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn\" (UID: \"137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b\") " pod="openstack-operators/96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn" Feb 02 22:49:41 crc kubenswrapper[4755]: I0202 22:49:41.779634 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b-bundle\") pod \"96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn\" (UID: \"137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b\") " pod="openstack-operators/96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn" Feb 02 22:49:41 crc kubenswrapper[4755]: I0202 22:49:41.804420 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmdw2\" (UniqueName: \"kubernetes.io/projected/137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b-kube-api-access-qmdw2\") pod \"96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn\" (UID: \"137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b\") " pod="openstack-operators/96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn" Feb 02 22:49:41 crc kubenswrapper[4755]: I0202 22:49:41.937972 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9wl4f" Feb 02 22:49:41 crc kubenswrapper[4755]: I0202 22:49:41.938632 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9wl4f" Feb 02 22:49:41 crc kubenswrapper[4755]: I0202 22:49:41.982078 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn" Feb 02 22:49:42 crc kubenswrapper[4755]: I0202 22:49:42.002663 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9wl4f" Feb 02 22:49:42 crc kubenswrapper[4755]: I0202 22:49:42.446636 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn"] Feb 02 22:49:42 crc kubenswrapper[4755]: I0202 22:49:42.578139 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn" event={"ID":"137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b","Type":"ContainerStarted","Data":"237aa802a7cab6e8af2f6a1ad77ac5e087336c6813227ace1747167f960c23c5"} Feb 02 22:49:42 crc kubenswrapper[4755]: I0202 22:49:42.637385 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9wl4f" Feb 02 22:49:43 crc kubenswrapper[4755]: I0202 22:49:43.585246 4755 generic.go:334] "Generic (PLEG): container finished" podID="137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b" containerID="9c730195e9302ccf74f1cd54828cb41383c314d0b00d9d26f158f242a5c70e96" exitCode=0 Feb 02 22:49:43 crc kubenswrapper[4755]: I0202 22:49:43.585366 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn" event={"ID":"137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b","Type":"ContainerDied","Data":"9c730195e9302ccf74f1cd54828cb41383c314d0b00d9d26f158f242a5c70e96"} Feb 02 22:49:43 crc kubenswrapper[4755]: I0202 22:49:43.587570 4755 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 22:49:44 
Feb 02 22:49:44 crc kubenswrapper[4755]: I0202 22:49:44.595384 4755 generic.go:334] "Generic (PLEG): container finished" podID="137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b" containerID="557fb6a6f28594ff07eee55bf718b5415c3338e5407e796d579f5c57af0ae67f" exitCode=0
Feb 02 22:49:44 crc kubenswrapper[4755]: I0202 22:49:44.595491 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn" event={"ID":"137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b","Type":"ContainerDied","Data":"557fb6a6f28594ff07eee55bf718b5415c3338e5407e796d579f5c57af0ae67f"}
Feb 02 22:49:44 crc kubenswrapper[4755]: I0202 22:49:44.978984 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9wl4f"]
Feb 02 22:49:44 crc kubenswrapper[4755]: I0202 22:49:44.979429 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9wl4f" podUID="0ca1c569-aff0-4334-8317-137544ac5205" containerName="registry-server" containerID="cri-o://88a08d9c9c3d630d55311f1e9a64eb749c50f63939cf4423390e72aa5b4f593d" gracePeriod=2
Feb 02 22:49:45 crc kubenswrapper[4755]: I0202 22:49:45.605145 4755 generic.go:334] "Generic (PLEG): container finished" podID="137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b" containerID="0d668e90dd954b986b2839b8a05e1256c025e665f0f6926fbac4c21bbda0d049" exitCode=0
Feb 02 22:49:45 crc kubenswrapper[4755]: I0202 22:49:45.605207 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn" event={"ID":"137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b","Type":"ContainerDied","Data":"0d668e90dd954b986b2839b8a05e1256c025e665f0f6926fbac4c21bbda0d049"}
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.119446 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9wl4f"
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.245066 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cf5h6\" (UniqueName: \"kubernetes.io/projected/0ca1c569-aff0-4334-8317-137544ac5205-kube-api-access-cf5h6\") pod \"0ca1c569-aff0-4334-8317-137544ac5205\" (UID: \"0ca1c569-aff0-4334-8317-137544ac5205\") "
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.246016 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ca1c569-aff0-4334-8317-137544ac5205-catalog-content\") pod \"0ca1c569-aff0-4334-8317-137544ac5205\" (UID: \"0ca1c569-aff0-4334-8317-137544ac5205\") "
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.246154 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ca1c569-aff0-4334-8317-137544ac5205-utilities\") pod \"0ca1c569-aff0-4334-8317-137544ac5205\" (UID: \"0ca1c569-aff0-4334-8317-137544ac5205\") "
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.247672 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ca1c569-aff0-4334-8317-137544ac5205-utilities" (OuterVolumeSpecName: "utilities") pod "0ca1c569-aff0-4334-8317-137544ac5205" (UID: "0ca1c569-aff0-4334-8317-137544ac5205"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.256289 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ca1c569-aff0-4334-8317-137544ac5205-kube-api-access-cf5h6" (OuterVolumeSpecName: "kube-api-access-cf5h6") pod "0ca1c569-aff0-4334-8317-137544ac5205" (UID: "0ca1c569-aff0-4334-8317-137544ac5205"). InnerVolumeSpecName "kube-api-access-cf5h6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.302360 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ca1c569-aff0-4334-8317-137544ac5205-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0ca1c569-aff0-4334-8317-137544ac5205" (UID: "0ca1c569-aff0-4334-8317-137544ac5205"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.348644 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ca1c569-aff0-4334-8317-137544ac5205-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.348684 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cf5h6\" (UniqueName: \"kubernetes.io/projected/0ca1c569-aff0-4334-8317-137544ac5205-kube-api-access-cf5h6\") on node \"crc\" DevicePath \"\""
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.348700 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ca1c569-aff0-4334-8317-137544ac5205-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.618030 4755 generic.go:334] "Generic (PLEG): container finished" podID="0ca1c569-aff0-4334-8317-137544ac5205" containerID="88a08d9c9c3d630d55311f1e9a64eb749c50f63939cf4423390e72aa5b4f593d" exitCode=0
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.618097 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9wl4f"
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.618117 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9wl4f" event={"ID":"0ca1c569-aff0-4334-8317-137544ac5205","Type":"ContainerDied","Data":"88a08d9c9c3d630d55311f1e9a64eb749c50f63939cf4423390e72aa5b4f593d"}
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.618715 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9wl4f" event={"ID":"0ca1c569-aff0-4334-8317-137544ac5205","Type":"ContainerDied","Data":"c03a51038eb87a82422393703c5e2772065cffc5f0f0427e3c413a58d3689211"}
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.618786 4755 scope.go:117] "RemoveContainer" containerID="88a08d9c9c3d630d55311f1e9a64eb749c50f63939cf4423390e72aa5b4f593d"
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.677283 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9wl4f"]
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.681684 4755 scope.go:117] "RemoveContainer" containerID="619754f5b97ed79cc5369ae62ebb90cd7c3d018a43784bd535b772670d4b34ff"
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.693189 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9wl4f"]
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.714641 4755 scope.go:117] "RemoveContainer" containerID="c546458f5c106f3cb59ff583ea3244c43aa7a679dbab7a16b2a7f4443954e5b3"
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.733512 4755 scope.go:117] "RemoveContainer" containerID="88a08d9c9c3d630d55311f1e9a64eb749c50f63939cf4423390e72aa5b4f593d"
Feb 02 22:49:46 crc kubenswrapper[4755]: E0202 22:49:46.734175 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88a08d9c9c3d630d55311f1e9a64eb749c50f63939cf4423390e72aa5b4f593d\": container with ID starting with 88a08d9c9c3d630d55311f1e9a64eb749c50f63939cf4423390e72aa5b4f593d not found: ID does not exist" containerID="88a08d9c9c3d630d55311f1e9a64eb749c50f63939cf4423390e72aa5b4f593d"
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.734235 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88a08d9c9c3d630d55311f1e9a64eb749c50f63939cf4423390e72aa5b4f593d"} err="failed to get container status \"88a08d9c9c3d630d55311f1e9a64eb749c50f63939cf4423390e72aa5b4f593d\": rpc error: code = NotFound desc = could not find container \"88a08d9c9c3d630d55311f1e9a64eb749c50f63939cf4423390e72aa5b4f593d\": container with ID starting with 88a08d9c9c3d630d55311f1e9a64eb749c50f63939cf4423390e72aa5b4f593d not found: ID does not exist"
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.734266 4755 scope.go:117] "RemoveContainer" containerID="619754f5b97ed79cc5369ae62ebb90cd7c3d018a43784bd535b772670d4b34ff"
Feb 02 22:49:46 crc kubenswrapper[4755]: E0202 22:49:46.734763 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"619754f5b97ed79cc5369ae62ebb90cd7c3d018a43784bd535b772670d4b34ff\": container with ID starting with 619754f5b97ed79cc5369ae62ebb90cd7c3d018a43784bd535b772670d4b34ff not found: ID does not exist" containerID="619754f5b97ed79cc5369ae62ebb90cd7c3d018a43784bd535b772670d4b34ff"
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.734811 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"619754f5b97ed79cc5369ae62ebb90cd7c3d018a43784bd535b772670d4b34ff"} err="failed to get container status \"619754f5b97ed79cc5369ae62ebb90cd7c3d018a43784bd535b772670d4b34ff\": rpc error: code = NotFound desc = could not find container \"619754f5b97ed79cc5369ae62ebb90cd7c3d018a43784bd535b772670d4b34ff\": container with ID starting with 619754f5b97ed79cc5369ae62ebb90cd7c3d018a43784bd535b772670d4b34ff not found: ID does not exist"
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.734855 4755 scope.go:117] "RemoveContainer" containerID="c546458f5c106f3cb59ff583ea3244c43aa7a679dbab7a16b2a7f4443954e5b3"
Feb 02 22:49:46 crc kubenswrapper[4755]: E0202 22:49:46.735180 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c546458f5c106f3cb59ff583ea3244c43aa7a679dbab7a16b2a7f4443954e5b3\": container with ID starting with c546458f5c106f3cb59ff583ea3244c43aa7a679dbab7a16b2a7f4443954e5b3 not found: ID does not exist" containerID="c546458f5c106f3cb59ff583ea3244c43aa7a679dbab7a16b2a7f4443954e5b3"
Feb 02 22:49:46 crc kubenswrapper[4755]: I0202 22:49:46.735214 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c546458f5c106f3cb59ff583ea3244c43aa7a679dbab7a16b2a7f4443954e5b3"} err="failed to get container status \"c546458f5c106f3cb59ff583ea3244c43aa7a679dbab7a16b2a7f4443954e5b3\": rpc error: code = NotFound desc = could not find container \"c546458f5c106f3cb59ff583ea3244c43aa7a679dbab7a16b2a7f4443954e5b3\": container with ID starting with c546458f5c106f3cb59ff583ea3244c43aa7a679dbab7a16b2a7f4443954e5b3 not found: ID does not exist"
Feb 02 22:49:47 crc kubenswrapper[4755]: I0202 22:49:47.034102 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn"
Feb 02 22:49:47 crc kubenswrapper[4755]: I0202 22:49:47.096542 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ca1c569-aff0-4334-8317-137544ac5205" path="/var/lib/kubelet/pods/0ca1c569-aff0-4334-8317-137544ac5205/volumes"
Feb 02 22:49:47 crc kubenswrapper[4755]: I0202 22:49:47.164138 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b-util\") pod \"137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b\" (UID: \"137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b\") "
Feb 02 22:49:47 crc kubenswrapper[4755]: I0202 22:49:47.164350 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmdw2\" (UniqueName: \"kubernetes.io/projected/137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b-kube-api-access-qmdw2\") pod \"137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b\" (UID: \"137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b\") "
Feb 02 22:49:47 crc kubenswrapper[4755]: I0202 22:49:47.164440 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b-bundle\") pod \"137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b\" (UID: \"137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b\") "
Feb 02 22:49:47 crc kubenswrapper[4755]: I0202 22:49:47.165655 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b-bundle" (OuterVolumeSpecName: "bundle") pod "137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b" (UID: "137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 22:49:47 crc kubenswrapper[4755]: I0202 22:49:47.170370 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b-kube-api-access-qmdw2" (OuterVolumeSpecName: "kube-api-access-qmdw2") pod "137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b" (UID: "137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b"). InnerVolumeSpecName "kube-api-access-qmdw2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:49:47 crc kubenswrapper[4755]: I0202 22:49:47.189621 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b-util" (OuterVolumeSpecName: "util") pod "137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b" (UID: "137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 22:49:47 crc kubenswrapper[4755]: I0202 22:49:47.265981 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmdw2\" (UniqueName: \"kubernetes.io/projected/137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b-kube-api-access-qmdw2\") on node \"crc\" DevicePath \"\""
Feb 02 22:49:47 crc kubenswrapper[4755]: I0202 22:49:47.266468 4755 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 22:49:47 crc kubenswrapper[4755]: I0202 22:49:47.266593 4755 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b-util\") on node \"crc\" DevicePath \"\""
Feb 02 22:49:47 crc kubenswrapper[4755]: I0202 22:49:47.635338 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn" event={"ID":"137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b","Type":"ContainerDied","Data":"237aa802a7cab6e8af2f6a1ad77ac5e087336c6813227ace1747167f960c23c5"}
Feb 02 22:49:47 crc kubenswrapper[4755]: I0202 22:49:47.635408 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="237aa802a7cab6e8af2f6a1ad77ac5e087336c6813227ace1747167f960c23c5"
Feb 02 22:49:47 crc kubenswrapper[4755]: I0202 22:49:47.635428 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn"
Feb 02 22:49:53 crc kubenswrapper[4755]: I0202 22:49:53.389064 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 22:49:53 crc kubenswrapper[4755]: I0202 22:49:53.389507 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 22:49:53 crc kubenswrapper[4755]: I0202 22:49:53.643378 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-init-848b5fdc57-2r7s8"]
Feb 02 22:49:53 crc kubenswrapper[4755]: E0202 22:49:53.643760 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ca1c569-aff0-4334-8317-137544ac5205" containerName="extract-content"
Feb 02 22:49:53 crc kubenswrapper[4755]: I0202 22:49:53.643787 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ca1c569-aff0-4334-8317-137544ac5205" containerName="extract-content"
Feb 02 22:49:53 crc kubenswrapper[4755]: E0202 22:49:53.643808 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b" containerName="pull"
Feb 02 22:49:53 crc kubenswrapper[4755]: I0202 22:49:53.643821 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b" containerName="pull"
Feb 02 22:49:53 crc kubenswrapper[4755]: E0202 22:49:53.643848 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b" containerName="util"
Feb 02 22:49:53 crc kubenswrapper[4755]: I0202 22:49:53.643861 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b" containerName="util"
Feb 02 22:49:53 crc kubenswrapper[4755]: E0202 22:49:53.643892 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ca1c569-aff0-4334-8317-137544ac5205" containerName="registry-server"
Feb 02 22:49:53 crc kubenswrapper[4755]: I0202 22:49:53.643904 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ca1c569-aff0-4334-8317-137544ac5205" containerName="registry-server"
Feb 02 22:49:53 crc kubenswrapper[4755]: E0202 22:49:53.643922 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ca1c569-aff0-4334-8317-137544ac5205" containerName="extract-utilities"
Feb 02 22:49:53 crc kubenswrapper[4755]: I0202 22:49:53.643934 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ca1c569-aff0-4334-8317-137544ac5205" containerName="extract-utilities"
Feb 02 22:49:53 crc kubenswrapper[4755]: E0202 22:49:53.643947 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b" containerName="extract"
Feb 02 22:49:53 crc kubenswrapper[4755]: I0202 22:49:53.643960 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b" containerName="extract"
Feb 02 22:49:53 crc kubenswrapper[4755]: I0202 22:49:53.644145 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ca1c569-aff0-4334-8317-137544ac5205" containerName="registry-server"
Feb 02 22:49:53 crc kubenswrapper[4755]: I0202 22:49:53.644193 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b" containerName="extract"
Feb 02 22:49:53 crc kubenswrapper[4755]: I0202 22:49:53.644872 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-848b5fdc57-2r7s8"
Feb 02 22:49:53 crc kubenswrapper[4755]: I0202 22:49:53.648030 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-init-dockercfg-m8q6n"
Feb 02 22:49:53 crc kubenswrapper[4755]: I0202 22:49:53.660808 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-848b5fdc57-2r7s8"]
Feb 02 22:49:53 crc kubenswrapper[4755]: I0202 22:49:53.757640 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thlks\" (UniqueName: \"kubernetes.io/projected/65cf4adb-782b-4e5e-b961-4e5f29b83b80-kube-api-access-thlks\") pod \"openstack-operator-controller-init-848b5fdc57-2r7s8\" (UID: \"65cf4adb-782b-4e5e-b961-4e5f29b83b80\") " pod="openstack-operators/openstack-operator-controller-init-848b5fdc57-2r7s8"
Feb 02 22:49:53 crc kubenswrapper[4755]: I0202 22:49:53.859216 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thlks\" (UniqueName: \"kubernetes.io/projected/65cf4adb-782b-4e5e-b961-4e5f29b83b80-kube-api-access-thlks\") pod \"openstack-operator-controller-init-848b5fdc57-2r7s8\" (UID: \"65cf4adb-782b-4e5e-b961-4e5f29b83b80\") " pod="openstack-operators/openstack-operator-controller-init-848b5fdc57-2r7s8"
Feb 02 22:49:53 crc kubenswrapper[4755]: I0202 22:49:53.882704 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thlks\" (UniqueName: \"kubernetes.io/projected/65cf4adb-782b-4e5e-b961-4e5f29b83b80-kube-api-access-thlks\") pod \"openstack-operator-controller-init-848b5fdc57-2r7s8\" (UID: \"65cf4adb-782b-4e5e-b961-4e5f29b83b80\") " pod="openstack-operators/openstack-operator-controller-init-848b5fdc57-2r7s8"
Feb 02 22:49:53 crc kubenswrapper[4755]: I0202 22:49:53.972425 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-848b5fdc57-2r7s8"
Feb 02 22:49:54 crc kubenswrapper[4755]: I0202 22:49:54.398274 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-848b5fdc57-2r7s8"]
Feb 02 22:49:54 crc kubenswrapper[4755]: W0202 22:49:54.410115 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod65cf4adb_782b_4e5e_b961_4e5f29b83b80.slice/crio-5cb8e677c9224e18b403faf3b5a5aaee34b5cf79d6f59f1760b968dc7ea62502 WatchSource:0}: Error finding container 5cb8e677c9224e18b403faf3b5a5aaee34b5cf79d6f59f1760b968dc7ea62502: Status 404 returned error can't find the container with id 5cb8e677c9224e18b403faf3b5a5aaee34b5cf79d6f59f1760b968dc7ea62502
Feb 02 22:49:54 crc kubenswrapper[4755]: I0202 22:49:54.696362 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-848b5fdc57-2r7s8" event={"ID":"65cf4adb-782b-4e5e-b961-4e5f29b83b80","Type":"ContainerStarted","Data":"5cb8e677c9224e18b403faf3b5a5aaee34b5cf79d6f59f1760b968dc7ea62502"}
Feb 02 22:49:58 crc kubenswrapper[4755]: I0202 22:49:58.735721 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-848b5fdc57-2r7s8" event={"ID":"65cf4adb-782b-4e5e-b961-4e5f29b83b80","Type":"ContainerStarted","Data":"ea395933959719b58f79a3a946b792ce4478a79e6b74c1ec85fdf84db6e98ffe"}
Feb 02 22:49:58 crc kubenswrapper[4755]: I0202 22:49:58.736430 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-init-848b5fdc57-2r7s8"
Feb 02 22:49:58 crc kubenswrapper[4755]: I0202 22:49:58.785500 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-init-848b5fdc57-2r7s8" podStartSLOduration=2.176041761 podStartE2EDuration="5.785474708s" podCreationTimestamp="2026-02-02 22:49:53 +0000 UTC" firstStartedPulling="2026-02-02 22:49:54.41263147 +0000 UTC m=+950.103851836" lastFinishedPulling="2026-02-02 22:49:58.022064447 +0000 UTC m=+953.713284783" observedRunningTime="2026-02-02 22:49:58.782104694 +0000 UTC m=+954.473325050" watchObservedRunningTime="2026-02-02 22:49:58.785474708 +0000 UTC m=+954.476695064"
Feb 02 22:50:03 crc kubenswrapper[4755]: I0202 22:50:03.304976 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-cdk7z"]
Feb 02 22:50:03 crc kubenswrapper[4755]: I0202 22:50:03.306978 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cdk7z"
Feb 02 22:50:03 crc kubenswrapper[4755]: I0202 22:50:03.326002 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cdk7z"]
Feb 02 22:50:03 crc kubenswrapper[4755]: I0202 22:50:03.400494 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/822f4261-0626-4423-9528-3f3e2db5cdce-utilities\") pod \"redhat-marketplace-cdk7z\" (UID: \"822f4261-0626-4423-9528-3f3e2db5cdce\") " pod="openshift-marketplace/redhat-marketplace-cdk7z"
Feb 02 22:50:03 crc kubenswrapper[4755]: I0202 22:50:03.400751 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/822f4261-0626-4423-9528-3f3e2db5cdce-catalog-content\") pod \"redhat-marketplace-cdk7z\" (UID: \"822f4261-0626-4423-9528-3f3e2db5cdce\") " pod="openshift-marketplace/redhat-marketplace-cdk7z"
Feb 02 22:50:03 crc kubenswrapper[4755]: I0202 22:50:03.400796 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6kfk\" (UniqueName: \"kubernetes.io/projected/822f4261-0626-4423-9528-3f3e2db5cdce-kube-api-access-n6kfk\") pod \"redhat-marketplace-cdk7z\" (UID: \"822f4261-0626-4423-9528-3f3e2db5cdce\") " pod="openshift-marketplace/redhat-marketplace-cdk7z"
Feb 02 22:50:03 crc kubenswrapper[4755]: I0202 22:50:03.501330 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/822f4261-0626-4423-9528-3f3e2db5cdce-utilities\") pod \"redhat-marketplace-cdk7z\" (UID: \"822f4261-0626-4423-9528-3f3e2db5cdce\") " pod="openshift-marketplace/redhat-marketplace-cdk7z"
Feb 02 22:50:03 crc kubenswrapper[4755]: I0202 22:50:03.501502 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/822f4261-0626-4423-9528-3f3e2db5cdce-catalog-content\") pod \"redhat-marketplace-cdk7z\" (UID: \"822f4261-0626-4423-9528-3f3e2db5cdce\") " pod="openshift-marketplace/redhat-marketplace-cdk7z"
Feb 02 22:50:03 crc kubenswrapper[4755]: I0202 22:50:03.501538 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6kfk\" (UniqueName: \"kubernetes.io/projected/822f4261-0626-4423-9528-3f3e2db5cdce-kube-api-access-n6kfk\") pod \"redhat-marketplace-cdk7z\" (UID: \"822f4261-0626-4423-9528-3f3e2db5cdce\") " pod="openshift-marketplace/redhat-marketplace-cdk7z"
Feb 02 22:50:03 crc kubenswrapper[4755]: I0202 22:50:03.502409 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/822f4261-0626-4423-9528-3f3e2db5cdce-utilities\") pod \"redhat-marketplace-cdk7z\" (UID: \"822f4261-0626-4423-9528-3f3e2db5cdce\") " pod="openshift-marketplace/redhat-marketplace-cdk7z"
Feb 02 22:50:03 crc kubenswrapper[4755]: I0202 22:50:03.502442 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/822f4261-0626-4423-9528-3f3e2db5cdce-catalog-content\") pod \"redhat-marketplace-cdk7z\" (UID: \"822f4261-0626-4423-9528-3f3e2db5cdce\") " pod="openshift-marketplace/redhat-marketplace-cdk7z"
Feb 02 22:50:03 crc kubenswrapper[4755]: I0202 22:50:03.525865 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6kfk\" (UniqueName: \"kubernetes.io/projected/822f4261-0626-4423-9528-3f3e2db5cdce-kube-api-access-n6kfk\") pod \"redhat-marketplace-cdk7z\" (UID: \"822f4261-0626-4423-9528-3f3e2db5cdce\") " pod="openshift-marketplace/redhat-marketplace-cdk7z"
Feb 02 22:50:03 crc kubenswrapper[4755]: I0202 22:50:03.632459 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cdk7z"
Feb 02 22:50:03 crc kubenswrapper[4755]: I0202 22:50:03.974835 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-init-848b5fdc57-2r7s8"
Feb 02 22:50:04 crc kubenswrapper[4755]: I0202 22:50:04.059946 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cdk7z"]
Feb 02 22:50:04 crc kubenswrapper[4755]: I0202 22:50:04.783837 4755 generic.go:334] "Generic (PLEG): container finished" podID="822f4261-0626-4423-9528-3f3e2db5cdce" containerID="2b35c07db90088a7f2c6abfd433e930064c7903509b04430a5f45a319283aa08" exitCode=0
Feb 02 22:50:04 crc kubenswrapper[4755]: I0202 22:50:04.784051 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cdk7z" event={"ID":"822f4261-0626-4423-9528-3f3e2db5cdce","Type":"ContainerDied","Data":"2b35c07db90088a7f2c6abfd433e930064c7903509b04430a5f45a319283aa08"}
Feb 02 22:50:04 crc kubenswrapper[4755]: I0202 22:50:04.784077 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cdk7z" event={"ID":"822f4261-0626-4423-9528-3f3e2db5cdce","Type":"ContainerStarted","Data":"accd066d7eb903a01062ae1dbff36895f28842394cae6eb59d8acf947e003654"}
Feb 02 22:50:05 crc kubenswrapper[4755]: I0202 22:50:05.791362 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cdk7z" event={"ID":"822f4261-0626-4423-9528-3f3e2db5cdce","Type":"ContainerStarted","Data":"d0840290de9c1f25ae16c4ad137ebfb8f72d69c953eb17a2104d484b1a4f1bba"}
Feb 02 22:50:06 crc kubenswrapper[4755]: I0202 22:50:06.803429 4755 generic.go:334] "Generic (PLEG): container finished" podID="822f4261-0626-4423-9528-3f3e2db5cdce" containerID="d0840290de9c1f25ae16c4ad137ebfb8f72d69c953eb17a2104d484b1a4f1bba" exitCode=0
Feb 02 22:50:06 crc kubenswrapper[4755]: I0202 22:50:06.803513 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cdk7z" event={"ID":"822f4261-0626-4423-9528-3f3e2db5cdce","Type":"ContainerDied","Data":"d0840290de9c1f25ae16c4ad137ebfb8f72d69c953eb17a2104d484b1a4f1bba"}
Feb 02 22:50:07 crc kubenswrapper[4755]: I0202 22:50:07.813381 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cdk7z" event={"ID":"822f4261-0626-4423-9528-3f3e2db5cdce","Type":"ContainerStarted","Data":"72813978d157a62ee872e50ef923c2d559a9513f9b3dbc58415b0f32f038afc1"}
Feb 02 22:50:07 crc kubenswrapper[4755]: I0202 22:50:07.832047 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-cdk7z" podStartSLOduration=2.382099531 podStartE2EDuration="4.832028716s" podCreationTimestamp="2026-02-02 22:50:03 +0000 UTC" firstStartedPulling="2026-02-02 22:50:04.785317926 +0000 UTC m=+960.476538252" lastFinishedPulling="2026-02-02 22:50:07.235247081 +0000 UTC m=+962.926467437" observedRunningTime="2026-02-02 22:50:07.831479361 +0000 UTC m=+963.522699687" watchObservedRunningTime="2026-02-02 22:50:07.832028716 +0000 UTC m=+963.523249042"
Feb 02 22:50:13 crc kubenswrapper[4755]: I0202 22:50:13.633378 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-cdk7z"
Feb 02 22:50:13 crc kubenswrapper[4755]: I0202 22:50:13.634022 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-cdk7z"
Feb 02 22:50:13 crc kubenswrapper[4755]: I0202 22:50:13.681854 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-cdk7z"
Feb 02 22:50:13 crc kubenswrapper[4755]: I0202 22:50:13.903787 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-cdk7z"
Feb 02 22:50:13 crc kubenswrapper[4755]: I0202 22:50:13.944555 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cdk7z"]
Feb 02 22:50:15 crc kubenswrapper[4755]: I0202 22:50:15.878183 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-cdk7z" podUID="822f4261-0626-4423-9528-3f3e2db5cdce" containerName="registry-server" containerID="cri-o://72813978d157a62ee872e50ef923c2d559a9513f9b3dbc58415b0f32f038afc1" gracePeriod=2
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.307764 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cdk7z"
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.484308 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/822f4261-0626-4423-9528-3f3e2db5cdce-catalog-content\") pod \"822f4261-0626-4423-9528-3f3e2db5cdce\" (UID: \"822f4261-0626-4423-9528-3f3e2db5cdce\") "
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.484376 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/822f4261-0626-4423-9528-3f3e2db5cdce-utilities\") pod \"822f4261-0626-4423-9528-3f3e2db5cdce\" (UID: \"822f4261-0626-4423-9528-3f3e2db5cdce\") "
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.484486 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n6kfk\" (UniqueName: \"kubernetes.io/projected/822f4261-0626-4423-9528-3f3e2db5cdce-kube-api-access-n6kfk\") pod \"822f4261-0626-4423-9528-3f3e2db5cdce\" (UID: \"822f4261-0626-4423-9528-3f3e2db5cdce\") "
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.485783 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/822f4261-0626-4423-9528-3f3e2db5cdce-utilities" (OuterVolumeSpecName: "utilities") pod "822f4261-0626-4423-9528-3f3e2db5cdce" (UID: "822f4261-0626-4423-9528-3f3e2db5cdce"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.495166 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/822f4261-0626-4423-9528-3f3e2db5cdce-kube-api-access-n6kfk" (OuterVolumeSpecName: "kube-api-access-n6kfk") pod "822f4261-0626-4423-9528-3f3e2db5cdce" (UID: "822f4261-0626-4423-9528-3f3e2db5cdce"). InnerVolumeSpecName "kube-api-access-n6kfk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.530297 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/822f4261-0626-4423-9528-3f3e2db5cdce-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "822f4261-0626-4423-9528-3f3e2db5cdce" (UID: "822f4261-0626-4423-9528-3f3e2db5cdce"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.585604 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n6kfk\" (UniqueName: \"kubernetes.io/projected/822f4261-0626-4423-9528-3f3e2db5cdce-kube-api-access-n6kfk\") on node \"crc\" DevicePath \"\""
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.585637 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/822f4261-0626-4423-9528-3f3e2db5cdce-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.585651 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/822f4261-0626-4423-9528-3f3e2db5cdce-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.886586 4755 generic.go:334] "Generic (PLEG): container finished" podID="822f4261-0626-4423-9528-3f3e2db5cdce" containerID="72813978d157a62ee872e50ef923c2d559a9513f9b3dbc58415b0f32f038afc1" exitCode=0
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.886630 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cdk7z" event={"ID":"822f4261-0626-4423-9528-3f3e2db5cdce","Type":"ContainerDied","Data":"72813978d157a62ee872e50ef923c2d559a9513f9b3dbc58415b0f32f038afc1"}
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.887904 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cdk7z" event={"ID":"822f4261-0626-4423-9528-3f3e2db5cdce","Type":"ContainerDied","Data":"accd066d7eb903a01062ae1dbff36895f28842394cae6eb59d8acf947e003654"}
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.887931 4755 scope.go:117] "RemoveContainer" containerID="72813978d157a62ee872e50ef923c2d559a9513f9b3dbc58415b0f32f038afc1"
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.886657 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cdk7z"
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.918616 4755 scope.go:117] "RemoveContainer" containerID="d0840290de9c1f25ae16c4ad137ebfb8f72d69c953eb17a2104d484b1a4f1bba"
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.926185 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cdk7z"]
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.953795 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-cdk7z"]
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.961526 4755 scope.go:117] "RemoveContainer" containerID="2b35c07db90088a7f2c6abfd433e930064c7903509b04430a5f45a319283aa08"
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.980315 4755 scope.go:117] "RemoveContainer" containerID="72813978d157a62ee872e50ef923c2d559a9513f9b3dbc58415b0f32f038afc1"
Feb 02 22:50:16 crc kubenswrapper[4755]: E0202 22:50:16.980802 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72813978d157a62ee872e50ef923c2d559a9513f9b3dbc58415b0f32f038afc1\": container with ID starting with 72813978d157a62ee872e50ef923c2d559a9513f9b3dbc58415b0f32f038afc1 not found: ID does not exist" containerID="72813978d157a62ee872e50ef923c2d559a9513f9b3dbc58415b0f32f038afc1"
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.980889 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72813978d157a62ee872e50ef923c2d559a9513f9b3dbc58415b0f32f038afc1"} err="failed to get container status \"72813978d157a62ee872e50ef923c2d559a9513f9b3dbc58415b0f32f038afc1\": rpc error: code = NotFound desc = could not find container \"72813978d157a62ee872e50ef923c2d559a9513f9b3dbc58415b0f32f038afc1\": container with ID starting with 72813978d157a62ee872e50ef923c2d559a9513f9b3dbc58415b0f32f038afc1 not found: ID does not exist"
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.980976 4755 scope.go:117] "RemoveContainer" containerID="d0840290de9c1f25ae16c4ad137ebfb8f72d69c953eb17a2104d484b1a4f1bba"
Feb 02 22:50:16 crc kubenswrapper[4755]: E0202 22:50:16.981355 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0840290de9c1f25ae16c4ad137ebfb8f72d69c953eb17a2104d484b1a4f1bba\": container with ID starting with d0840290de9c1f25ae16c4ad137ebfb8f72d69c953eb17a2104d484b1a4f1bba not found: ID does not exist" containerID="d0840290de9c1f25ae16c4ad137ebfb8f72d69c953eb17a2104d484b1a4f1bba"
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.981375 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0840290de9c1f25ae16c4ad137ebfb8f72d69c953eb17a2104d484b1a4f1bba"} err="failed to get container status \"d0840290de9c1f25ae16c4ad137ebfb8f72d69c953eb17a2104d484b1a4f1bba\": rpc error: code = NotFound desc = could not find container \"d0840290de9c1f25ae16c4ad137ebfb8f72d69c953eb17a2104d484b1a4f1bba\": container with ID starting with d0840290de9c1f25ae16c4ad137ebfb8f72d69c953eb17a2104d484b1a4f1bba not found: ID does not exist"
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.981404 4755 scope.go:117] "RemoveContainer" containerID="2b35c07db90088a7f2c6abfd433e930064c7903509b04430a5f45a319283aa08"
Feb 02 22:50:16 crc kubenswrapper[4755]: E0202 22:50:16.981594 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b35c07db90088a7f2c6abfd433e930064c7903509b04430a5f45a319283aa08\": container with ID starting with 2b35c07db90088a7f2c6abfd433e930064c7903509b04430a5f45a319283aa08 not found: ID does not exist" containerID="2b35c07db90088a7f2c6abfd433e930064c7903509b04430a5f45a319283aa08"
Feb 02 22:50:16 crc kubenswrapper[4755]: I0202 22:50:16.981674 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b35c07db90088a7f2c6abfd433e930064c7903509b04430a5f45a319283aa08"} err="failed to get container status \"2b35c07db90088a7f2c6abfd433e930064c7903509b04430a5f45a319283aa08\": rpc error: code = NotFound desc = could not find container \"2b35c07db90088a7f2c6abfd433e930064c7903509b04430a5f45a319283aa08\": container with ID starting with 2b35c07db90088a7f2c6abfd433e930064c7903509b04430a5f45a319283aa08 not found: ID does not exist"
Feb 02 22:50:17 crc kubenswrapper[4755]: I0202 22:50:17.082036 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="822f4261-0626-4423-9528-3f3e2db5cdce" path="/var/lib/kubelet/pods/822f4261-0626-4423-9528-3f3e2db5cdce/volumes"
Feb 02 22:50:23 crc kubenswrapper[4755]: I0202 22:50:23.389629 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 22:50:23 crc kubenswrapper[4755]: I0202 22:50:23.390365 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 22:50:23 crc kubenswrapper[4755]: I0202 22:50:23.390436 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc"
Feb 02 22:50:23 crc kubenswrapper[4755]: I0202 22:50:23.960595 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b3e3969df739edd98047f8857204b723c7cae6ce3d65529d90b43e5d926f70bf"} pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 02 22:50:23 crc kubenswrapper[4755]: I0202 22:50:23.960791 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" containerID="cri-o://b3e3969df739edd98047f8857204b723c7cae6ce3d65529d90b43e5d926f70bf" gracePeriod=600
Feb 02 22:50:24 crc kubenswrapper[4755]: I0202 22:50:24.968683 4755 generic.go:334] "Generic (PLEG): container finished" podID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerID="b3e3969df739edd98047f8857204b723c7cae6ce3d65529d90b43e5d926f70bf" exitCode=0
Feb 02 22:50:24 crc kubenswrapper[4755]: I0202 22:50:24.968764 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerDied","Data":"b3e3969df739edd98047f8857204b723c7cae6ce3d65529d90b43e5d926f70bf"}
Feb 02 22:50:24 crc kubenswrapper[4755]: I0202 22:50:24.969285 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerStarted","Data":"b7878a61f8677fe4ed7b8526051e4c43447e019572d069fa0c208b41ce260865"}
Feb 02 22:50:24 crc kubenswrapper[4755]: I0202 22:50:24.969314 4755 scope.go:117] "RemoveContainer" containerID="bb11c6f11cba368ea19d2ffe2c3481fe5fb952a4be61f80011767257620e0091"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.017539 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-lpnrn"]
Feb 02 22:50:39 crc kubenswrapper[4755]: E0202 22:50:39.027957 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="822f4261-0626-4423-9528-3f3e2db5cdce" containerName="registry-server"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.028060 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="822f4261-0626-4423-9528-3f3e2db5cdce" containerName="registry-server"
Feb 02 22:50:39 crc kubenswrapper[4755]: E0202 22:50:39.028128 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="822f4261-0626-4423-9528-3f3e2db5cdce" containerName="extract-content"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.028187 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="822f4261-0626-4423-9528-3f3e2db5cdce" containerName="extract-content"
Feb 02 22:50:39 crc kubenswrapper[4755]: E0202 22:50:39.028238 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="822f4261-0626-4423-9528-3f3e2db5cdce" containerName="extract-utilities"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.028304 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="822f4261-0626-4423-9528-3f3e2db5cdce" containerName="extract-utilities"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.028493 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="822f4261-0626-4423-9528-3f3e2db5cdce" containerName="registry-server"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.029000 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-lpnrn"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.030198 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d874c8fc-gfmgq"]
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.031326 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-gfmgq"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.034805 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-lpnrn"]
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.034926 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-klvkd"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.040135 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-6d9697b7f4-sgwm7"]
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.040591 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-5pgq9"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.040968 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-sgwm7"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.047255 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-mxccf"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.059223 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-6d9697b7f4-sgwm7"]
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.064035 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-8886f4c47-4br2h"]
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.064878 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-4br2h"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.068008 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-nntzd"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.092550 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d874c8fc-gfmgq"]
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.102385 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rk6hn\" (UniqueName: \"kubernetes.io/projected/62edcea1-a12a-428b-bec6-d5c14bcb2d9d-kube-api-access-rk6hn\") pod \"designate-operator-controller-manager-6d9697b7f4-sgwm7\" (UID: \"62edcea1-a12a-428b-bec6-d5c14bcb2d9d\") " pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-sgwm7"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.102430 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bh7p\" (UniqueName: \"kubernetes.io/projected/06cf134b-94e8-4945-b882-bc54dd5c5045-kube-api-access-4bh7p\") pod \"glance-operator-controller-manager-8886f4c47-4br2h\" (UID: \"06cf134b-94e8-4945-b882-bc54dd5c5045\") " pod="openstack-operators/glance-operator-controller-manager-8886f4c47-4br2h"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.102460 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwk96\" (UniqueName: \"kubernetes.io/projected/f8a6ce9f-fde2-4696-9302-7edb0a04d233-kube-api-access-pwk96\") pod \"cinder-operator-controller-manager-8d874c8fc-gfmgq\" (UID: \"f8a6ce9f-fde2-4696-9302-7edb0a04d233\") " pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-gfmgq"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.102528 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cnlk\" (UniqueName: \"kubernetes.io/projected/49a76ede-9115-4a09-b344-f7e130018c83-kube-api-access-5cnlk\") pod \"barbican-operator-controller-manager-7b6c4d8c5f-lpnrn\" (UID: \"49a76ede-9115-4a09-b344-f7e130018c83\") " pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-lpnrn"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.127058 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-69d6db494d-lz7b7"]
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.127875 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-lz7b7"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.129602 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-bpd2s"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.144419 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-8886f4c47-4br2h"]
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.154811 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-69d6db494d-lz7b7"]
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.160564 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-hvd6r"]
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.161927 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-hvd6r"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.202917 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-v5nnt"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.217266 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dv9xt\" (UniqueName: \"kubernetes.io/projected/cc7aea9e-48ae-4d78-835e-3516d8bdd1e0-kube-api-access-dv9xt\") pod \"heat-operator-controller-manager-69d6db494d-lz7b7\" (UID: \"cc7aea9e-48ae-4d78-835e-3516d8bdd1e0\") " pod="openstack-operators/heat-operator-controller-manager-69d6db494d-lz7b7"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.217675 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cnlk\" (UniqueName: \"kubernetes.io/projected/49a76ede-9115-4a09-b344-f7e130018c83-kube-api-access-5cnlk\") pod \"barbican-operator-controller-manager-7b6c4d8c5f-lpnrn\" (UID: \"49a76ede-9115-4a09-b344-f7e130018c83\") " pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-lpnrn"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.218070 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rk6hn\" (UniqueName: \"kubernetes.io/projected/62edcea1-a12a-428b-bec6-d5c14bcb2d9d-kube-api-access-rk6hn\") pod \"designate-operator-controller-manager-6d9697b7f4-sgwm7\" (UID: \"62edcea1-a12a-428b-bec6-d5c14bcb2d9d\") " pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-sgwm7"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.218197 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bh7p\" (UniqueName: \"kubernetes.io/projected/06cf134b-94e8-4945-b882-bc54dd5c5045-kube-api-access-4bh7p\") pod \"glance-operator-controller-manager-8886f4c47-4br2h\" (UID: \"06cf134b-94e8-4945-b882-bc54dd5c5045\") " pod="openstack-operators/glance-operator-controller-manager-8886f4c47-4br2h"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.218316 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwk96\" (UniqueName: \"kubernetes.io/projected/f8a6ce9f-fde2-4696-9302-7edb0a04d233-kube-api-access-pwk96\") pod \"cinder-operator-controller-manager-8d874c8fc-gfmgq\" (UID: \"f8a6ce9f-fde2-4696-9302-7edb0a04d233\") " pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-gfmgq"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.218490 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9b87\" (UniqueName: \"kubernetes.io/projected/7cdb62bb-2e9f-43c8-a6ac-5e05577fb7bd-kube-api-access-d9b87\") pod \"horizon-operator-controller-manager-5fb775575f-hvd6r\" (UID: \"7cdb62bb-2e9f-43c8-a6ac-5e05577fb7bd\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-hvd6r"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.233560 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-zh58s"]
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.235000 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-79955696d6-zh58s"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.245837 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-88dqm"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.246471 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.250536 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cnlk\" (UniqueName: \"kubernetes.io/projected/49a76ede-9115-4a09-b344-f7e130018c83-kube-api-access-5cnlk\") pod \"barbican-operator-controller-manager-7b6c4d8c5f-lpnrn\" (UID: \"49a76ede-9115-4a09-b344-f7e130018c83\") " pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-lpnrn"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.251068 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-hvd6r"]
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.260748 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwk96\" (UniqueName: \"kubernetes.io/projected/f8a6ce9f-fde2-4696-9302-7edb0a04d233-kube-api-access-pwk96\") pod \"cinder-operator-controller-manager-8d874c8fc-gfmgq\" (UID: \"f8a6ce9f-fde2-4696-9302-7edb0a04d233\") " pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-gfmgq"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.272340 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bh7p\" (UniqueName: \"kubernetes.io/projected/06cf134b-94e8-4945-b882-bc54dd5c5045-kube-api-access-4bh7p\") pod \"glance-operator-controller-manager-8886f4c47-4br2h\" (UID: \"06cf134b-94e8-4945-b882-bc54dd5c5045\") " pod="openstack-operators/glance-operator-controller-manager-8886f4c47-4br2h"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.276004 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rk6hn\" (UniqueName: \"kubernetes.io/projected/62edcea1-a12a-428b-bec6-d5c14bcb2d9d-kube-api-access-rk6hn\") pod \"designate-operator-controller-manager-6d9697b7f4-sgwm7\" (UID: \"62edcea1-a12a-428b-bec6-d5c14bcb2d9d\") " pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-sgwm7"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.276710 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-zh58s"]
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.282794 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-hlqln"]
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.283753 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-hlqln"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.289255 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-nmt47"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.322039 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-cert\") pod \"infra-operator-controller-manager-79955696d6-zh58s\" (UID: \"44ef88e4-d62d-4f16-ab3c-15b7136ac5c9\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-zh58s"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.322163 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9b87\" (UniqueName: \"kubernetes.io/projected/7cdb62bb-2e9f-43c8-a6ac-5e05577fb7bd-kube-api-access-d9b87\") pod \"horizon-operator-controller-manager-5fb775575f-hvd6r\" (UID: \"7cdb62bb-2e9f-43c8-a6ac-5e05577fb7bd\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-hvd6r"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.322244 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dp528\" (UniqueName: \"kubernetes.io/projected/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-kube-api-access-dp528\") pod \"infra-operator-controller-manager-79955696d6-zh58s\" (UID: \"44ef88e4-d62d-4f16-ab3c-15b7136ac5c9\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-zh58s"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.322325 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dv9xt\" (UniqueName: \"kubernetes.io/projected/cc7aea9e-48ae-4d78-835e-3516d8bdd1e0-kube-api-access-dv9xt\") pod \"heat-operator-controller-manager-69d6db494d-lz7b7\" (UID: \"cc7aea9e-48ae-4d78-835e-3516d8bdd1e0\") " pod="openstack-operators/heat-operator-controller-manager-69d6db494d-lz7b7"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.322395 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdbz9\" (UniqueName: \"kubernetes.io/projected/4bf6953c-28a8-49f6-b850-d6572decd288-kube-api-access-wdbz9\") pod \"ironic-operator-controller-manager-5f4b8bd54d-hlqln\" (UID: \"4bf6953c-28a8-49f6-b850-d6572decd288\") " pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-hlqln"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.335761 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-84f48565d4-rj689"]
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.337070 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-rj689"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.344470 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dv9xt\" (UniqueName: \"kubernetes.io/projected/cc7aea9e-48ae-4d78-835e-3516d8bdd1e0-kube-api-access-dv9xt\") pod \"heat-operator-controller-manager-69d6db494d-lz7b7\" (UID: \"cc7aea9e-48ae-4d78-835e-3516d8bdd1e0\") " pod="openstack-operators/heat-operator-controller-manager-69d6db494d-lz7b7"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.344971 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-s6vq5"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.345977 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-hlqln"]
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.349048 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9b87\" (UniqueName: \"kubernetes.io/projected/7cdb62bb-2e9f-43c8-a6ac-5e05577fb7bd-kube-api-access-d9b87\") pod \"horizon-operator-controller-manager-5fb775575f-hvd6r\" (UID: \"7cdb62bb-2e9f-43c8-a6ac-5e05577fb7bd\") " pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-hvd6r"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.360821 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-84f48565d4-rj689"]
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.363355 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-lpnrn"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.370793 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7dd968899f-f8k9j"]
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.377175 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-f8k9j"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.386953 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-h69gk"]
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.387830 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-h69gk"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.388452 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-gfmgq"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.389948 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-xdhvh"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.390003 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-h8bwr"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.400127 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7dd968899f-f8k9j"]
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.400461 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-sgwm7"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.403208 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-h69gk"]
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.408074 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-585dbc889-644bb"]
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.408869 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-644bb"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.410450 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-qtq25"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.416382 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-55bff696bd-lm5vg"]
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.420430 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-4br2h"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.424126 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxkwj\" (UniqueName: \"kubernetes.io/projected/62f03e5d-4b33-46dc-b74f-c5b2f19e8d7b-kube-api-access-rxkwj\") pod \"keystone-operator-controller-manager-84f48565d4-rj689\" (UID: \"62f03e5d-4b33-46dc-b74f-c5b2f19e8d7b\") " pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-rj689"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.424292 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-cert\") pod \"infra-operator-controller-manager-79955696d6-zh58s\" (UID: \"44ef88e4-d62d-4f16-ab3c-15b7136ac5c9\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-zh58s"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.424408 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dp528\" (UniqueName: \"kubernetes.io/projected/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-kube-api-access-dp528\") pod \"infra-operator-controller-manager-79955696d6-zh58s\" (UID: \"44ef88e4-d62d-4f16-ab3c-15b7136ac5c9\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-zh58s"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.424515 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdbz9\" (UniqueName: \"kubernetes.io/projected/4bf6953c-28a8-49f6-b850-d6572decd288-kube-api-access-wdbz9\") pod \"ironic-operator-controller-manager-5f4b8bd54d-hlqln\" (UID: \"4bf6953c-28a8-49f6-b850-d6572decd288\") " pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-hlqln"
Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.424602 4755 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-lm5vg" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.424851 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gf2l4\" (UniqueName: \"kubernetes.io/projected/67dc65c3-ffb5-4139-b405-87a180ddb551-kube-api-access-gf2l4\") pod \"manila-operator-controller-manager-7dd968899f-f8k9j\" (UID: \"67dc65c3-ffb5-4139-b405-87a180ddb551\") " pod="openstack-operators/manila-operator-controller-manager-7dd968899f-f8k9j" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.425005 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftptx\" (UniqueName: \"kubernetes.io/projected/c89c93bd-725d-4cb4-9464-22674774af64-kube-api-access-ftptx\") pod \"mariadb-operator-controller-manager-67bf948998-h69gk\" (UID: \"c89c93bd-725d-4cb4-9464-22674774af64\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-h69gk" Feb 02 22:50:39 crc kubenswrapper[4755]: E0202 22:50:39.425332 4755 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 02 22:50:39 crc kubenswrapper[4755]: E0202 22:50:39.425515 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-cert podName:44ef88e4-d62d-4f16-ab3c-15b7136ac5c9 nodeName:}" failed. No retries permitted until 2026-02-02 22:50:39.925494861 +0000 UTC m=+995.616715187 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-cert") pod "infra-operator-controller-manager-79955696d6-zh58s" (UID: "44ef88e4-d62d-4f16-ab3c-15b7136ac5c9") : secret "infra-operator-webhook-server-cert" not found Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.424527 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-585dbc889-644bb"] Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.427015 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6687f8d877-dwrws"] Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.431293 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-dwrws" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.431937 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-295m7" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.433193 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-55bff696bd-lm5vg"] Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.440877 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-cgclv" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.457782 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6687f8d877-dwrws"] Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.461742 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-lz7b7" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.470618 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dp528\" (UniqueName: \"kubernetes.io/projected/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-kube-api-access-dp528\") pod \"infra-operator-controller-manager-79955696d6-zh58s\" (UID: \"44ef88e4-d62d-4f16-ab3c-15b7136ac5c9\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-zh58s" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.472185 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-5q9mw"] Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.476282 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdbz9\" (UniqueName: \"kubernetes.io/projected/4bf6953c-28a8-49f6-b850-d6572decd288-kube-api-access-wdbz9\") pod \"ironic-operator-controller-manager-5f4b8bd54d-hlqln\" (UID: \"4bf6953c-28a8-49f6-b850-d6572decd288\") " pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-hlqln" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.476694 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-5q9mw" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.479029 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-5n9hl" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.487476 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs"] Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.488388 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.490650 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.496303 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-9sk9v" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.498794 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-7p67q"] Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.500188 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-7p67q" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.503872 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-68b9r" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.505900 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-5q9mw"] Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.509867 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-7p67q"] Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.518912 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs"] Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.523786 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-68fc8c869-cv8gh"] Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.524726 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-cv8gh" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.526302 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-hvd6r" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.527043 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-2fsgj" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.528694 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6br5p\" (UniqueName: \"kubernetes.io/projected/e9675ae3-4e81-4adb-85a0-cd21ac496df2-kube-api-access-6br5p\") pod \"neutron-operator-controller-manager-585dbc889-644bb\" (UID: \"e9675ae3-4e81-4adb-85a0-cd21ac496df2\") " pod="openstack-operators/neutron-operator-controller-manager-585dbc889-644bb" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.528757 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gf2l4\" (UniqueName: \"kubernetes.io/projected/67dc65c3-ffb5-4139-b405-87a180ddb551-kube-api-access-gf2l4\") pod \"manila-operator-controller-manager-7dd968899f-f8k9j\" (UID: \"67dc65c3-ffb5-4139-b405-87a180ddb551\") " pod="openstack-operators/manila-operator-controller-manager-7dd968899f-f8k9j" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.528779 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftptx\" (UniqueName: \"kubernetes.io/projected/c89c93bd-725d-4cb4-9464-22674774af64-kube-api-access-ftptx\") pod \"mariadb-operator-controller-manager-67bf948998-h69gk\" (UID: \"c89c93bd-725d-4cb4-9464-22674774af64\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-h69gk" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.528808 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/28233917-7a5b-4379-aa43-c42633f51848-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs\" (UID: \"28233917-7a5b-4379-aa43-c42633f51848\") " 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.528848 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-np7k8\" (UniqueName: \"kubernetes.io/projected/1734ceac-dd78-4d4b-986c-5a3c27c3c48f-kube-api-access-np7k8\") pod \"octavia-operator-controller-manager-6687f8d877-dwrws\" (UID: \"1734ceac-dd78-4d4b-986c-5a3c27c3c48f\") " pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-dwrws" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.528868 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxkwj\" (UniqueName: \"kubernetes.io/projected/62f03e5d-4b33-46dc-b74f-c5b2f19e8d7b-kube-api-access-rxkwj\") pod \"keystone-operator-controller-manager-84f48565d4-rj689\" (UID: \"62f03e5d-4b33-46dc-b74f-c5b2f19e8d7b\") " pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-rj689" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.529308 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcgzr\" (UniqueName: \"kubernetes.io/projected/afe1c6d0-666e-46f4-93f9-a814399d699b-kube-api-access-pcgzr\") pod \"nova-operator-controller-manager-55bff696bd-lm5vg\" (UID: \"afe1c6d0-666e-46f4-93f9-a814399d699b\") " pod="openstack-operators/nova-operator-controller-manager-55bff696bd-lm5vg" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.529342 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzg7j\" (UniqueName: \"kubernetes.io/projected/ec4fb29e-f536-46ec-bd89-5b212f2a5d13-kube-api-access-xzg7j\") pod \"placement-operator-controller-manager-5b964cf4cd-7p67q\" (UID: \"ec4fb29e-f536-46ec-bd89-5b212f2a5d13\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-7p67q" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.529360 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grph5\" (UniqueName: \"kubernetes.io/projected/5073a3de-5fb1-4375-9db4-a7009d6b8799-kube-api-access-grph5\") pod \"ovn-operator-controller-manager-788c46999f-5q9mw\" (UID: \"5073a3de-5fb1-4375-9db4-a7009d6b8799\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-5q9mw" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.529380 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46s7f\" (UniqueName: \"kubernetes.io/projected/28233917-7a5b-4379-aa43-c42633f51848-kube-api-access-46s7f\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs\" (UID: \"28233917-7a5b-4379-aa43-c42633f51848\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.532159 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-68fc8c869-cv8gh"] Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.582029 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gf2l4\" (UniqueName: \"kubernetes.io/projected/67dc65c3-ffb5-4139-b405-87a180ddb551-kube-api-access-gf2l4\") pod \"manila-operator-controller-manager-7dd968899f-f8k9j\" (UID: \"67dc65c3-ffb5-4139-b405-87a180ddb551\") " 
pod="openstack-operators/manila-operator-controller-manager-7dd968899f-f8k9j" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.582991 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftptx\" (UniqueName: \"kubernetes.io/projected/c89c93bd-725d-4cb4-9464-22674774af64-kube-api-access-ftptx\") pod \"mariadb-operator-controller-manager-67bf948998-h69gk\" (UID: \"c89c93bd-725d-4cb4-9464-22674774af64\") " pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-h69gk" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.589407 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxkwj\" (UniqueName: \"kubernetes.io/projected/62f03e5d-4b33-46dc-b74f-c5b2f19e8d7b-kube-api-access-rxkwj\") pod \"keystone-operator-controller-manager-84f48565d4-rj689\" (UID: \"62f03e5d-4b33-46dc-b74f-c5b2f19e8d7b\") " pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-rj689" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.601187 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5b96584f66-98jt2"] Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.602278 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5b96584f66-98jt2" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.609117 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-t6r5m" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.630455 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5b96584f66-98jt2"] Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.632098 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-hlqln" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.632834 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-np7k8\" (UniqueName: \"kubernetes.io/projected/1734ceac-dd78-4d4b-986c-5a3c27c3c48f-kube-api-access-np7k8\") pod \"octavia-operator-controller-manager-6687f8d877-dwrws\" (UID: \"1734ceac-dd78-4d4b-986c-5a3c27c3c48f\") " pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-dwrws" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.632887 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcgzr\" (UniqueName: \"kubernetes.io/projected/afe1c6d0-666e-46f4-93f9-a814399d699b-kube-api-access-pcgzr\") pod \"nova-operator-controller-manager-55bff696bd-lm5vg\" (UID: \"afe1c6d0-666e-46f4-93f9-a814399d699b\") " pod="openstack-operators/nova-operator-controller-manager-55bff696bd-lm5vg" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.632909 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzg7j\" (UniqueName: \"kubernetes.io/projected/ec4fb29e-f536-46ec-bd89-5b212f2a5d13-kube-api-access-xzg7j\") pod \"placement-operator-controller-manager-5b964cf4cd-7p67q\" (UID: \"ec4fb29e-f536-46ec-bd89-5b212f2a5d13\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-7p67q" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.632928 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grph5\" (UniqueName: \"kubernetes.io/projected/5073a3de-5fb1-4375-9db4-a7009d6b8799-kube-api-access-grph5\") pod \"ovn-operator-controller-manager-788c46999f-5q9mw\" (UID: \"5073a3de-5fb1-4375-9db4-a7009d6b8799\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-5q9mw" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.632947 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46s7f\" (UniqueName: \"kubernetes.io/projected/28233917-7a5b-4379-aa43-c42633f51848-kube-api-access-46s7f\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs\" (UID: \"28233917-7a5b-4379-aa43-c42633f51848\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.632966 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6br5p\" (UniqueName: \"kubernetes.io/projected/e9675ae3-4e81-4adb-85a0-cd21ac496df2-kube-api-access-6br5p\") pod \"neutron-operator-controller-manager-585dbc889-644bb\" (UID: \"e9675ae3-4e81-4adb-85a0-cd21ac496df2\") " pod="openstack-operators/neutron-operator-controller-manager-585dbc889-644bb" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.632993 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8z9w\" (UniqueName: \"kubernetes.io/projected/b5d1ece3-a9d2-4620-98f2-2bd2ff66184d-kube-api-access-f8z9w\") pod \"swift-operator-controller-manager-68fc8c869-cv8gh\" (UID: \"b5d1ece3-a9d2-4620-98f2-2bd2ff66184d\") " pod="openstack-operators/swift-operator-controller-manager-68fc8c869-cv8gh" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.633026 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/28233917-7a5b-4379-aa43-c42633f51848-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs\" (UID: \"28233917-7a5b-4379-aa43-c42633f51848\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.633060 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zxqd\" (UniqueName: \"kubernetes.io/projected/fd656823-f4de-4e4d-a109-7a180552abd1-kube-api-access-6zxqd\") pod \"telemetry-operator-controller-manager-5b96584f66-98jt2\" (UID: \"fd656823-f4de-4e4d-a109-7a180552abd1\") " pod="openstack-operators/telemetry-operator-controller-manager-5b96584f66-98jt2" Feb 02 22:50:39 crc kubenswrapper[4755]: E0202 22:50:39.633448 4755 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 22:50:39 crc kubenswrapper[4755]: E0202 22:50:39.633483 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28233917-7a5b-4379-aa43-c42633f51848-cert podName:28233917-7a5b-4379-aa43-c42633f51848 nodeName:}" failed. No retries permitted until 2026-02-02 22:50:40.13346932 +0000 UTC m=+995.824689646 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/28233917-7a5b-4379-aa43-c42633f51848-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs" (UID: "28233917-7a5b-4379-aa43-c42633f51848") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.664647 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grph5\" (UniqueName: \"kubernetes.io/projected/5073a3de-5fb1-4375-9db4-a7009d6b8799-kube-api-access-grph5\") pod \"ovn-operator-controller-manager-788c46999f-5q9mw\" (UID: \"5073a3de-5fb1-4375-9db4-a7009d6b8799\") " pod="openstack-operators/ovn-operator-controller-manager-788c46999f-5q9mw" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.667819 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzg7j\" (UniqueName: \"kubernetes.io/projected/ec4fb29e-f536-46ec-bd89-5b212f2a5d13-kube-api-access-xzg7j\") pod \"placement-operator-controller-manager-5b964cf4cd-7p67q\" (UID: \"ec4fb29e-f536-46ec-bd89-5b212f2a5d13\") " pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-7p67q" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.671326 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcgzr\" (UniqueName: \"kubernetes.io/projected/afe1c6d0-666e-46f4-93f9-a814399d699b-kube-api-access-pcgzr\") pod \"nova-operator-controller-manager-55bff696bd-lm5vg\" (UID: \"afe1c6d0-666e-46f4-93f9-a814399d699b\") " pod="openstack-operators/nova-operator-controller-manager-55bff696bd-lm5vg" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.680891 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6br5p\" (UniqueName: \"kubernetes.io/projected/e9675ae3-4e81-4adb-85a0-cd21ac496df2-kube-api-access-6br5p\") pod \"neutron-operator-controller-manager-585dbc889-644bb\" (UID: \"e9675ae3-4e81-4adb-85a0-cd21ac496df2\") " pod="openstack-operators/neutron-operator-controller-manager-585dbc889-644bb" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.690762 4755 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46s7f\" (UniqueName: \"kubernetes.io/projected/28233917-7a5b-4379-aa43-c42633f51848-kube-api-access-46s7f\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs\" (UID: \"28233917-7a5b-4379-aa43-c42633f51848\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.691438 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-np7k8\" (UniqueName: \"kubernetes.io/projected/1734ceac-dd78-4d4b-986c-5a3c27c3c48f-kube-api-access-np7k8\") pod \"octavia-operator-controller-manager-6687f8d877-dwrws\" (UID: \"1734ceac-dd78-4d4b-986c-5a3c27c3c48f\") " pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-dwrws" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.714799 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-rj689" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.740866 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zxqd\" (UniqueName: \"kubernetes.io/projected/fd656823-f4de-4e4d-a109-7a180552abd1-kube-api-access-6zxqd\") pod \"telemetry-operator-controller-manager-5b96584f66-98jt2\" (UID: \"fd656823-f4de-4e4d-a109-7a180552abd1\") " pod="openstack-operators/telemetry-operator-controller-manager-5b96584f66-98jt2" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.744718 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8z9w\" (UniqueName: \"kubernetes.io/projected/b5d1ece3-a9d2-4620-98f2-2bd2ff66184d-kube-api-access-f8z9w\") pod \"swift-operator-controller-manager-68fc8c869-cv8gh\" (UID: \"b5d1ece3-a9d2-4620-98f2-2bd2ff66184d\") " pod="openstack-operators/swift-operator-controller-manager-68fc8c869-cv8gh" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.745593 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-6xtvs"] Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.747346 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-f8k9j" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.747582 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-6xtvs" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.751147 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-brnwb" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.760261 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-6xtvs"] Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.778678 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zxqd\" (UniqueName: \"kubernetes.io/projected/fd656823-f4de-4e4d-a109-7a180552abd1-kube-api-access-6zxqd\") pod \"telemetry-operator-controller-manager-5b96584f66-98jt2\" (UID: \"fd656823-f4de-4e4d-a109-7a180552abd1\") " pod="openstack-operators/telemetry-operator-controller-manager-5b96584f66-98jt2" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.778862 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8z9w\" (UniqueName: \"kubernetes.io/projected/b5d1ece3-a9d2-4620-98f2-2bd2ff66184d-kube-api-access-f8z9w\") pod \"swift-operator-controller-manager-68fc8c869-cv8gh\" (UID: \"b5d1ece3-a9d2-4620-98f2-2bd2ff66184d\") " pod="openstack-operators/swift-operator-controller-manager-68fc8c869-cv8gh" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.778965 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-h69gk" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.783444 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-5prbd"] Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.784374 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-564965969-5prbd" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.788299 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-45ntc" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.796848 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-644bb" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.807825 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-5prbd"] Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.814479 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6"] Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.815451 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.817787 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-5qg9p" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.817990 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.818108 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.822183 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6"] Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.832297 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-lm5vg" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.843898 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-dwrws" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.846247 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fgv9\" (UniqueName: \"kubernetes.io/projected/04cd6a32-6398-4f89-b034-3a9ebf8da40b-kube-api-access-7fgv9\") pod \"watcher-operator-controller-manager-564965969-5prbd\" (UID: \"04cd6a32-6398-4f89-b034-3a9ebf8da40b\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-5prbd" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.846299 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-metrics-certs\") pod \"openstack-operator-controller-manager-bc8597898-njwc6\" (UID: \"9960033f-69b1-4b1c-9e06-aaf5e6d61559\") " pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.846370 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwrg8\" (UniqueName: \"kubernetes.io/projected/9960033f-69b1-4b1c-9e06-aaf5e6d61559-kube-api-access-xwrg8\") pod \"openstack-operator-controller-manager-bc8597898-njwc6\" (UID: \"9960033f-69b1-4b1c-9e06-aaf5e6d61559\") " pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.846394 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfdqx\" (UniqueName: \"kubernetes.io/projected/368193dc-45fb-4dff-8c24-8c38a7fd56da-kube-api-access-rfdqx\") pod \"test-operator-controller-manager-56f8bfcd9f-6xtvs\" (UID: \"368193dc-45fb-4dff-8c24-8c38a7fd56da\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-6xtvs" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.846414 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-webhook-certs\") pod \"openstack-operator-controller-manager-bc8597898-njwc6\" (UID: 
\"9960033f-69b1-4b1c-9e06-aaf5e6d61559\") " pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.847148 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-lpvlk"] Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.848108 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-lpvlk" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.851780 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-fkf67" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.857370 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-lpvlk"] Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.858006 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-5q9mw" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.925388 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-7p67q" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.942212 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-cv8gh" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.947153 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfdqx\" (UniqueName: \"kubernetes.io/projected/368193dc-45fb-4dff-8c24-8c38a7fd56da-kube-api-access-rfdqx\") pod \"test-operator-controller-manager-56f8bfcd9f-6xtvs\" (UID: \"368193dc-45fb-4dff-8c24-8c38a7fd56da\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-6xtvs" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.947196 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-webhook-certs\") pod \"openstack-operator-controller-manager-bc8597898-njwc6\" (UID: \"9960033f-69b1-4b1c-9e06-aaf5e6d61559\") " pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.947221 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvwgt\" (UniqueName: \"kubernetes.io/projected/b0dd57f2-6b55-4cc3-827f-a5c2321b2ad5-kube-api-access-xvwgt\") pod \"rabbitmq-cluster-operator-manager-668c99d594-lpvlk\" (UID: \"b0dd57f2-6b55-4cc3-827f-a5c2321b2ad5\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-lpvlk" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.947276 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fgv9\" (UniqueName: \"kubernetes.io/projected/04cd6a32-6398-4f89-b034-3a9ebf8da40b-kube-api-access-7fgv9\") pod \"watcher-operator-controller-manager-564965969-5prbd\" (UID: \"04cd6a32-6398-4f89-b034-3a9ebf8da40b\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-5prbd" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.947301 4755 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-metrics-certs\") pod \"openstack-operator-controller-manager-bc8597898-njwc6\" (UID: \"9960033f-69b1-4b1c-9e06-aaf5e6d61559\") " pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.947336 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-cert\") pod \"infra-operator-controller-manager-79955696d6-zh58s\" (UID: \"44ef88e4-d62d-4f16-ab3c-15b7136ac5c9\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-zh58s" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.947368 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwrg8\" (UniqueName: \"kubernetes.io/projected/9960033f-69b1-4b1c-9e06-aaf5e6d61559-kube-api-access-xwrg8\") pod \"openstack-operator-controller-manager-bc8597898-njwc6\" (UID: \"9960033f-69b1-4b1c-9e06-aaf5e6d61559\") " pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6" Feb 02 22:50:39 crc kubenswrapper[4755]: E0202 22:50:39.948047 4755 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 02 22:50:39 crc kubenswrapper[4755]: E0202 22:50:39.948107 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-webhook-certs podName:9960033f-69b1-4b1c-9e06-aaf5e6d61559 nodeName:}" failed. No retries permitted until 2026-02-02 22:50:40.448089118 +0000 UTC m=+996.139309444 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-webhook-certs") pod "openstack-operator-controller-manager-bc8597898-njwc6" (UID: "9960033f-69b1-4b1c-9e06-aaf5e6d61559") : secret "webhook-server-cert" not found Feb 02 22:50:39 crc kubenswrapper[4755]: E0202 22:50:39.948105 4755 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 02 22:50:39 crc kubenswrapper[4755]: E0202 22:50:39.948135 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-metrics-certs podName:9960033f-69b1-4b1c-9e06-aaf5e6d61559 nodeName:}" failed. No retries permitted until 2026-02-02 22:50:40.448129559 +0000 UTC m=+996.139349885 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-metrics-certs") pod "openstack-operator-controller-manager-bc8597898-njwc6" (UID: "9960033f-69b1-4b1c-9e06-aaf5e6d61559") : secret "metrics-server-cert" not found Feb 02 22:50:39 crc kubenswrapper[4755]: E0202 22:50:39.948202 4755 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 02 22:50:39 crc kubenswrapper[4755]: E0202 22:50:39.948265 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-cert podName:44ef88e4-d62d-4f16-ab3c-15b7136ac5c9 nodeName:}" failed. No retries permitted until 2026-02-02 22:50:40.948247673 +0000 UTC m=+996.639467999 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-cert") pod "infra-operator-controller-manager-79955696d6-zh58s" (UID: "44ef88e4-d62d-4f16-ab3c-15b7136ac5c9") : secret "infra-operator-webhook-server-cert" not found Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.964357 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5b96584f66-98jt2" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.973022 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fgv9\" (UniqueName: \"kubernetes.io/projected/04cd6a32-6398-4f89-b034-3a9ebf8da40b-kube-api-access-7fgv9\") pod \"watcher-operator-controller-manager-564965969-5prbd\" (UID: \"04cd6a32-6398-4f89-b034-3a9ebf8da40b\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-5prbd" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.973137 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfdqx\" (UniqueName: \"kubernetes.io/projected/368193dc-45fb-4dff-8c24-8c38a7fd56da-kube-api-access-rfdqx\") pod \"test-operator-controller-manager-56f8bfcd9f-6xtvs\" (UID: \"368193dc-45fb-4dff-8c24-8c38a7fd56da\") " pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-6xtvs" Feb 02 22:50:39 crc kubenswrapper[4755]: I0202 22:50:39.973351 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwrg8\" (UniqueName: \"kubernetes.io/projected/9960033f-69b1-4b1c-9e06-aaf5e6d61559-kube-api-access-xwrg8\") pod \"openstack-operator-controller-manager-bc8597898-njwc6\" (UID: \"9960033f-69b1-4b1c-9e06-aaf5e6d61559\") " pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6" Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.052572 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvwgt\" (UniqueName: \"kubernetes.io/projected/b0dd57f2-6b55-4cc3-827f-a5c2321b2ad5-kube-api-access-xvwgt\") pod \"rabbitmq-cluster-operator-manager-668c99d594-lpvlk\" (UID: \"b0dd57f2-6b55-4cc3-827f-a5c2321b2ad5\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-lpvlk" Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.084174 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvwgt\" (UniqueName: \"kubernetes.io/projected/b0dd57f2-6b55-4cc3-827f-a5c2321b2ad5-kube-api-access-xvwgt\") pod \"rabbitmq-cluster-operator-manager-668c99d594-lpvlk\" (UID: \"b0dd57f2-6b55-4cc3-827f-a5c2321b2ad5\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-lpvlk" Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.153979 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/28233917-7a5b-4379-aa43-c42633f51848-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs\" (UID: \"28233917-7a5b-4379-aa43-c42633f51848\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs" Feb 02 22:50:40 crc kubenswrapper[4755]: E0202 22:50:40.154298 4755 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 22:50:40 crc kubenswrapper[4755]: E0202 22:50:40.154356 4755 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/secret/28233917-7a5b-4379-aa43-c42633f51848-cert podName:28233917-7a5b-4379-aa43-c42633f51848 nodeName:}" failed. No retries permitted until 2026-02-02 22:50:41.154339309 +0000 UTC m=+996.845559635 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/28233917-7a5b-4379-aa43-c42633f51848-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs" (UID: "28233917-7a5b-4379-aa43-c42633f51848") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.164618 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-6xtvs" Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.183062 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-564965969-5prbd" Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.212999 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-lpvlk" Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.246530 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-8886f4c47-4br2h"] Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.258666 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-6d9697b7f4-sgwm7"] Feb 02 22:50:40 crc kubenswrapper[4755]: W0202 22:50:40.283558 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod62edcea1_a12a_428b_bec6_d5c14bcb2d9d.slice/crio-ec9f2af29c1cc1b66e2c3a8a35c73589d15bb578aff5483d8447689cb122efb8 WatchSource:0}: Error finding container ec9f2af29c1cc1b66e2c3a8a35c73589d15bb578aff5483d8447689cb122efb8: Status 404 returned error can't find the container with id ec9f2af29c1cc1b66e2c3a8a35c73589d15bb578aff5483d8447689cb122efb8 Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.456362 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-69d6db494d-lz7b7"] Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.458501 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-webhook-certs\") pod \"openstack-operator-controller-manager-bc8597898-njwc6\" (UID: \"9960033f-69b1-4b1c-9e06-aaf5e6d61559\") " pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6" Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.458596 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-metrics-certs\") pod \"openstack-operator-controller-manager-bc8597898-njwc6\" (UID: \"9960033f-69b1-4b1c-9e06-aaf5e6d61559\") " pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6" Feb 02 22:50:40 crc kubenswrapper[4755]: E0202 22:50:40.458756 4755 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 02 22:50:40 crc kubenswrapper[4755]: E0202 22:50:40.458813 4755 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-metrics-certs podName:9960033f-69b1-4b1c-9e06-aaf5e6d61559 nodeName:}" failed. No retries permitted until 2026-02-02 22:50:41.458790554 +0000 UTC m=+997.150010880 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-metrics-certs") pod "openstack-operator-controller-manager-bc8597898-njwc6" (UID: "9960033f-69b1-4b1c-9e06-aaf5e6d61559") : secret "metrics-server-cert" not found Feb 02 22:50:40 crc kubenswrapper[4755]: E0202 22:50:40.459149 4755 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 02 22:50:40 crc kubenswrapper[4755]: E0202 22:50:40.459181 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-webhook-certs podName:9960033f-69b1-4b1c-9e06-aaf5e6d61559 nodeName:}" failed. No retries permitted until 2026-02-02 22:50:41.459169994 +0000 UTC m=+997.150390320 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-webhook-certs") pod "openstack-operator-controller-manager-bc8597898-njwc6" (UID: "9960033f-69b1-4b1c-9e06-aaf5e6d61559") : secret "webhook-server-cert" not found Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.467054 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-lpnrn"] Feb 02 22:50:40 crc kubenswrapper[4755]: W0202 22:50:40.468214 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc7aea9e_48ae_4d78_835e_3516d8bdd1e0.slice/crio-229d11b306597f47427864edeee781f752bcfe77a607c14523b5053ff24dedca WatchSource:0}: Error finding container 229d11b306597f47427864edeee781f752bcfe77a607c14523b5053ff24dedca: Status 404 returned error can't find the container with id 229d11b306597f47427864edeee781f752bcfe77a607c14523b5053ff24dedca Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.479842 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5fb775575f-hvd6r"] Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.494536 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d874c8fc-gfmgq"] Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.611353 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-hlqln"] Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.617955 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-585dbc889-644bb"] Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.624137 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7dd968899f-f8k9j"] Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.630222 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67bf948998-h69gk"] Feb 02 22:50:40 crc kubenswrapper[4755]: W0202 22:50:40.638691 4755 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode9675ae3_4e81_4adb_85a0_cd21ac496df2.slice/crio-144d51365348590732f3648c45531fdc056361c5f0ae14f916c5a3d29ae1feed WatchSource:0}: Error finding container 144d51365348590732f3648c45531fdc056361c5f0ae14f916c5a3d29ae1feed: Status 404 returned error can't find the container with id 144d51365348590732f3648c45531fdc056361c5f0ae14f916c5a3d29ae1feed
Feb 02 22:50:40 crc kubenswrapper[4755]: W0202 22:50:40.640091 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4bf6953c_28a8_49f6_b850_d6572decd288.slice/crio-32551276cc07685fb00ab53d224aa362a461afe9e2d5ab109bc1b8120903da36 WatchSource:0}: Error finding container 32551276cc07685fb00ab53d224aa362a461afe9e2d5ab109bc1b8120903da36: Status 404 returned error can't find the container with id 32551276cc07685fb00ab53d224aa362a461afe9e2d5ab109bc1b8120903da36
Feb 02 22:50:40 crc kubenswrapper[4755]: W0202 22:50:40.653337 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc89c93bd_725d_4cb4_9464_22674774af64.slice/crio-6ba3361820c785acaa6311f15698662b62fab89ae107fdaf0ffe27a1db238b17 WatchSource:0}: Error finding container 6ba3361820c785acaa6311f15698662b62fab89ae107fdaf0ffe27a1db238b17: Status 404 returned error can't find the container with id 6ba3361820c785acaa6311f15698662b62fab89ae107fdaf0ffe27a1db238b17
Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.748628 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5b96584f66-98jt2"]
Feb 02 22:50:40 crc kubenswrapper[4755]: W0202 22:50:40.761196 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5073a3de_5fb1_4375_9db4_a7009d6b8799.slice/crio-6daa36b52b84828cd00f05d8028d64ec8fb3d08c7dc0c8e32742ba132ec73fc5 WatchSource:0}: Error finding container 6daa36b52b84828cd00f05d8028d64ec8fb3d08c7dc0c8e32742ba132ec73fc5: Status 404 returned error can't find the container with id 6daa36b52b84828cd00f05d8028d64ec8fb3d08c7dc0c8e32742ba132ec73fc5
Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.764795 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-788c46999f-5q9mw"]
Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.772977 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-84f48565d4-rj689"]
Feb 02 22:50:40 crc kubenswrapper[4755]: W0202 22:50:40.773119 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod62f03e5d_4b33_46dc_b74f_c5b2f19e8d7b.slice/crio-2f7934ca662f64f315206f2232ee875aa5b03579e3d45cf1cc132f04d43c8de5 WatchSource:0}: Error finding container 2f7934ca662f64f315206f2232ee875aa5b03579e3d45cf1cc132f04d43c8de5: Status 404 returned error can't find the container with id 2f7934ca662f64f315206f2232ee875aa5b03579e3d45cf1cc132f04d43c8de5
Feb 02 22:50:40 crc kubenswrapper[4755]: W0202 22:50:40.774246 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1734ceac_dd78_4d4b_986c_5a3c27c3c48f.slice/crio-7cb07324275a68347efa69f625af3f4e398bbc4c05a6dc0f6cc2231f0543a368 WatchSource:0}: Error finding container 7cb07324275a68347efa69f625af3f4e398bbc4c05a6dc0f6cc2231f0543a368: Status 404 returned error can't find the container with id 7cb07324275a68347efa69f625af3f4e398bbc4c05a6dc0f6cc2231f0543a368
Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.779378 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5b964cf4cd-7p67q"]
Feb 02 22:50:40 crc kubenswrapper[4755]: E0202 22:50:40.779591 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:319c969e88f109b26487a9f5a67203682803d7386424703ab7ca0340be99ae17,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rxkwj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-84f48565d4-rj689_openstack-operators(62f03e5d-4b33-46dc-b74f-c5b2f19e8d7b): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Feb 02 22:50:40 crc kubenswrapper[4755]: E0202 22:50:40.779912 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-np7k8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-6687f8d877-dwrws_openstack-operators(1734ceac-dd78-4d4b-986c-5a3c27c3c48f): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Feb 02 22:50:40 crc kubenswrapper[4755]: E0202 22:50:40.780993 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-dwrws" podUID="1734ceac-dd78-4d4b-986c-5a3c27c3c48f"
Feb 02 22:50:40 crc kubenswrapper[4755]: E0202 22:50:40.785005 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-rj689" podUID="62f03e5d-4b33-46dc-b74f-c5b2f19e8d7b"
Feb 02 22:50:40 crc kubenswrapper[4755]: E0202 22:50:40.809979 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:5340b88039fac393da49ef4e181b2720c809c27a6bb30531a07a49342a1da45e,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pcgzr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-55bff696bd-lm5vg_openstack-operators(afe1c6d0-666e-46f4-93f9-a814399d699b): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Feb 02 22:50:40 crc kubenswrapper[4755]: E0202 22:50:40.811087 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-lm5vg" podUID="afe1c6d0-666e-46f4-93f9-a814399d699b"
Feb 02 22:50:40 crc kubenswrapper[4755]: E0202 22:50:40.817144 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:e0824d5d461ada59715eb3048ed9394c80abba09c45503f8f90ee3b34e525488,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xzg7j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-5b964cf4cd-7p67q_openstack-operators(ec4fb29e-f536-46ec-bd89-5b212f2a5d13): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Feb 02 22:50:40 crc kubenswrapper[4755]: E0202 22:50:40.819606 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-7p67q" podUID="ec4fb29e-f536-46ec-bd89-5b212f2a5d13"
Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.819643 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6687f8d877-dwrws"]
Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.825479 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-55bff696bd-lm5vg"]
Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.925335 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-56f8bfcd9f-6xtvs"]
Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.929493 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-68fc8c869-cv8gh"]
Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.938175 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-lpvlk"]
Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.942913 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-5prbd"]
Feb 02 22:50:40 crc kubenswrapper[4755]: W0202 22:50:40.944122 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb5d1ece3_a9d2_4620_98f2_2bd2ff66184d.slice/crio-ad073cf3b15d5821a09016b34c8340658aad48808ea37e204a71de7eccaeb121 WatchSource:0}: Error finding container ad073cf3b15d5821a09016b34c8340658aad48808ea37e204a71de7eccaeb121: Status 404 returned error can't find the container with id ad073cf3b15d5821a09016b34c8340658aad48808ea37e204a71de7eccaeb121
Feb 02 22:50:40 crc kubenswrapper[4755]: W0202 22:50:40.945983 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb0dd57f2_6b55_4cc3_827f_a5c2321b2ad5.slice/crio-7b033fb7c6dda3d8c77b92133b11958f34fe34959789995dbe03f1c21498717e WatchSource:0}: Error finding container 7b033fb7c6dda3d8c77b92133b11958f34fe34959789995dbe03f1c21498717e: Status 404 returned error can't find the container with id 7b033fb7c6dda3d8c77b92133b11958f34fe34959789995dbe03f1c21498717e
Feb 02 22:50:40 crc kubenswrapper[4755]: E0202 22:50:40.949316 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xvwgt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-lpvlk_openstack-operators(b0dd57f2-6b55-4cc3-827f-a5c2321b2ad5): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Feb 02 22:50:40 crc kubenswrapper[4755]: W0202 22:50:40.949723 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod368193dc_45fb_4dff_8c24_8c38a7fd56da.slice/crio-7c82ee5c3cbd15568b30ffca1fd92b7d506121a8fde367533113b5ceb58be122 WatchSource:0}: Error finding container 7c82ee5c3cbd15568b30ffca1fd92b7d506121a8fde367533113b5ceb58be122: Status 404 returned error can't find the container with id 7c82ee5c3cbd15568b30ffca1fd92b7d506121a8fde367533113b5ceb58be122
Feb 02 22:50:40 crc kubenswrapper[4755]: E0202 22:50:40.950685 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-lpvlk" podUID="b0dd57f2-6b55-4cc3-827f-a5c2321b2ad5"
Feb 02 22:50:40 crc kubenswrapper[4755]: E0202 22:50:40.957267 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7fgv9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-564965969-5prbd_openstack-operators(04cd6a32-6398-4f89-b034-3a9ebf8da40b): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Feb 02 22:50:40 crc kubenswrapper[4755]: E0202 22:50:40.957506 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rfdqx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-56f8bfcd9f-6xtvs_openstack-operators(368193dc-45fb-4dff-8c24-8c38a7fd56da): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Feb 02 22:50:40 crc kubenswrapper[4755]: E0202 22:50:40.959215 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-6xtvs" podUID="368193dc-45fb-4dff-8c24-8c38a7fd56da"
Feb 02 22:50:40 crc kubenswrapper[4755]: E0202 22:50:40.959318 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-564965969-5prbd" podUID="04cd6a32-6398-4f89-b034-3a9ebf8da40b"
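The burst of ErrImagePull failures above is the kubelet's own client-side throttle, not a registry error: image pulls are gated by a token bucket sized by the KubeletConfiguration fields registryPullQPS and registryBurst (defaults 5 and 10), so when a dozen-plus operator pods land on the node at once, the pulls beyond the bucket are rejected immediately with "pull QPS exceeded" and the pods fall into ImagePullBackOff until retried. A minimal Go sketch of that gating, assuming k8s.io/client-go is on the module path and using the default 5/10 sizing; this is illustrative only, not the kubelet's actual pull path:

package main

import (
	"fmt"

	"k8s.io/client-go/util/flowcontrol"
)

func main() {
	// Token bucket sized like the kubelet defaults: registryPullQPS=5,
	// registryBurst=10. The first 10 pulls drain the burst; later pulls
	// are admitted at 5/s and anything beyond that is rejected outright.
	limiter := flowcontrol.NewTokenBucketRateLimiter(5, 10)
	for i := 1; i <= 18; i++ {
		if limiter.TryAccept() {
			fmt.Printf("pull %2d: admitted\n", i)
		} else {
			// The kubelet surfaces this branch as ErrImagePull: pull QPS exceeded.
			fmt.Printf("pull %2d: pull QPS exceeded\n", i)
		}
	}
}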
Feb 02 22:50:40 crc kubenswrapper[4755]: I0202 22:50:40.965658 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-cert\") pod \"infra-operator-controller-manager-79955696d6-zh58s\" (UID: \"44ef88e4-d62d-4f16-ab3c-15b7136ac5c9\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-zh58s"
Feb 02 22:50:40 crc kubenswrapper[4755]: E0202 22:50:40.965896 4755 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Feb 02 22:50:40 crc kubenswrapper[4755]: E0202 22:50:40.965951 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-cert podName:44ef88e4-d62d-4f16-ab3c-15b7136ac5c9 nodeName:}" failed. No retries permitted until 2026-02-02 22:50:42.965932598 +0000 UTC m=+998.657152924 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-cert") pod "infra-operator-controller-manager-79955696d6-zh58s" (UID: "44ef88e4-d62d-4f16-ab3c-15b7136ac5c9") : secret "infra-operator-webhook-server-cert" not found
Feb 02 22:50:41 crc kubenswrapper[4755]: I0202 22:50:41.100231 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-7p67q" event={"ID":"ec4fb29e-f536-46ec-bd89-5b212f2a5d13","Type":"ContainerStarted","Data":"b0b719046553c7bc88d2d8f2410ef72ddb1c7822fb81def8c9e4f1babec666b3"}
Feb 02 22:50:41 crc kubenswrapper[4755]: I0202 22:50:41.101682 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-rj689" event={"ID":"62f03e5d-4b33-46dc-b74f-c5b2f19e8d7b","Type":"ContainerStarted","Data":"2f7934ca662f64f315206f2232ee875aa5b03579e3d45cf1cc132f04d43c8de5"}
Feb 02 22:50:41 crc kubenswrapper[4755]: E0202 22:50:41.103942 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:319c969e88f109b26487a9f5a67203682803d7386424703ab7ca0340be99ae17\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-rj689" podUID="62f03e5d-4b33-46dc-b74f-c5b2f19e8d7b"
Feb 02 22:50:41 crc kubenswrapper[4755]: I0202 22:50:41.104038 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-dwrws" event={"ID":"1734ceac-dd78-4d4b-986c-5a3c27c3c48f","Type":"ContainerStarted","Data":"7cb07324275a68347efa69f625af3f4e398bbc4c05a6dc0f6cc2231f0543a368"}
Feb 02 22:50:41 crc kubenswrapper[4755]: E0202 22:50:41.105312 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-dwrws" podUID="1734ceac-dd78-4d4b-986c-5a3c27c3c48f"
Feb 02 22:50:41 crc kubenswrapper[4755]: I0202 22:50:41.105334 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-sgwm7" event={"ID":"62edcea1-a12a-428b-bec6-d5c14bcb2d9d","Type":"ContainerStarted","Data":"ec9f2af29c1cc1b66e2c3a8a35c73589d15bb578aff5483d8447689cb122efb8"}
Feb 02 22:50:41 crc kubenswrapper[4755]: I0202 22:50:41.106667 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-gfmgq" event={"ID":"f8a6ce9f-fde2-4696-9302-7edb0a04d233","Type":"ContainerStarted","Data":"9d062c15423f4f4454ca4569038980d4280184c99fc332bf86789d2d484efc81"}
Feb 02 22:50:41 crc kubenswrapper[4755]: I0202 22:50:41.108102 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-lm5vg" event={"ID":"afe1c6d0-666e-46f4-93f9-a814399d699b","Type":"ContainerStarted","Data":"eb7f4d77fe92538538009dd89a548d4ae7dc7750657ff0f0dcc97dda395308fb"}
Feb 02 22:50:41 crc kubenswrapper[4755]: E0202 22:50:41.109351 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:5340b88039fac393da49ef4e181b2720c809c27a6bb30531a07a49342a1da45e\\\"\"" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-lm5vg" podUID="afe1c6d0-666e-46f4-93f9-a814399d699b"
Feb 02 22:50:41 crc kubenswrapper[4755]: I0202 22:50:41.109369 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-hvd6r" event={"ID":"7cdb62bb-2e9f-43c8-a6ac-5e05577fb7bd","Type":"ContainerStarted","Data":"4a31dd3ad46fe140aa6ae587f0cfbbd8259d872b08e5f93b07e0f25cc7fbd092"}
Feb 02 22:50:41 crc kubenswrapper[4755]: I0202 22:50:41.110585 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-6xtvs" event={"ID":"368193dc-45fb-4dff-8c24-8c38a7fd56da","Type":"ContainerStarted","Data":"7c82ee5c3cbd15568b30ffca1fd92b7d506121a8fde367533113b5ceb58be122"}
Feb 02 22:50:41 crc kubenswrapper[4755]: E0202 22:50:41.111369 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241\\\"\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-6xtvs" podUID="368193dc-45fb-4dff-8c24-8c38a7fd56da"
Feb 02 22:50:41 crc kubenswrapper[4755]: I0202 22:50:41.111659 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-564965969-5prbd" event={"ID":"04cd6a32-6398-4f89-b034-3a9ebf8da40b","Type":"ContainerStarted","Data":"42c2f267f6e75f8395a514f727a15ae007c60eda9ded7e9a16d33eea505a8012"}
Feb 02 22:50:41 crc kubenswrapper[4755]: E0202 22:50:41.111883 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:e0824d5d461ada59715eb3048ed9394c80abba09c45503f8f90ee3b34e525488\\\"\"" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-7p67q" podUID="ec4fb29e-f536-46ec-bd89-5b212f2a5d13"
Feb 02 22:50:41 crc kubenswrapper[4755]: I0202 22:50:41.112391 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-cv8gh" event={"ID":"b5d1ece3-a9d2-4620-98f2-2bd2ff66184d","Type":"ContainerStarted","Data":"ad073cf3b15d5821a09016b34c8340658aad48808ea37e204a71de7eccaeb121"}
Feb 02 22:50:41 crc kubenswrapper[4755]: E0202 22:50:41.112475 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-564965969-5prbd" podUID="04cd6a32-6398-4f89-b034-3a9ebf8da40b"
Feb 02 22:50:41 crc kubenswrapper[4755]: I0202 22:50:41.114065 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-f8k9j" event={"ID":"67dc65c3-ffb5-4139-b405-87a180ddb551","Type":"ContainerStarted","Data":"1772e491f0630be150b548a47456fe4bac6edfb1ee71ece7d42cd46e468fc8db"}
Feb 02 22:50:41 crc kubenswrapper[4755]: I0202 22:50:41.115199 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-4br2h" event={"ID":"06cf134b-94e8-4945-b882-bc54dd5c5045","Type":"ContainerStarted","Data":"5730255b5fcfceaa1c764cec44734e3b4d7db96934308020fce040c398a8af31"}
Feb 02 22:50:41 crc kubenswrapper[4755]: I0202 22:50:41.116113 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-h69gk" event={"ID":"c89c93bd-725d-4cb4-9464-22674774af64","Type":"ContainerStarted","Data":"6ba3361820c785acaa6311f15698662b62fab89ae107fdaf0ffe27a1db238b17"}
Feb 02 22:50:41 crc kubenswrapper[4755]: I0202 22:50:41.117075 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-lz7b7" event={"ID":"cc7aea9e-48ae-4d78-835e-3516d8bdd1e0","Type":"ContainerStarted","Data":"229d11b306597f47427864edeee781f752bcfe77a607c14523b5053ff24dedca"}
Feb 02 22:50:41 crc kubenswrapper[4755]: I0202 22:50:41.119921 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-5q9mw" event={"ID":"5073a3de-5fb1-4375-9db4-a7009d6b8799","Type":"ContainerStarted","Data":"6daa36b52b84828cd00f05d8028d64ec8fb3d08c7dc0c8e32742ba132ec73fc5"}
Feb 02 22:50:41 crc kubenswrapper[4755]: I0202 22:50:41.127086 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-lpvlk" event={"ID":"b0dd57f2-6b55-4cc3-827f-a5c2321b2ad5","Type":"ContainerStarted","Data":"7b033fb7c6dda3d8c77b92133b11958f34fe34959789995dbe03f1c21498717e"}
Feb 02 22:50:41 crc kubenswrapper[4755]: E0202 22:50:41.128326 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-lpvlk" podUID="b0dd57f2-6b55-4cc3-827f-a5c2321b2ad5"
Feb 02 22:50:41 crc kubenswrapper[4755]: I0202 22:50:41.129139 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-lpnrn" event={"ID":"49a76ede-9115-4a09-b344-f7e130018c83","Type":"ContainerStarted","Data":"b3118883c0132ae51640530b8da9ac7ba6db0e6a45c4bb7281a65bbb35ad4766"}
Feb 02 22:50:41 crc kubenswrapper[4755]: I0202 22:50:41.130434 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-644bb" event={"ID":"e9675ae3-4e81-4adb-85a0-cd21ac496df2","Type":"ContainerStarted","Data":"144d51365348590732f3648c45531fdc056361c5f0ae14f916c5a3d29ae1feed"}
Feb 02 22:50:41 crc kubenswrapper[4755]: I0202 22:50:41.133584 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5b96584f66-98jt2" event={"ID":"fd656823-f4de-4e4d-a109-7a180552abd1","Type":"ContainerStarted","Data":"a7012bec88d3c77ffb8e607abcd7d6eb32e68dccf3a8ea5038cfdde50a8ed0cc"}
Feb 02 22:50:41 crc kubenswrapper[4755]: I0202 22:50:41.139747 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-hlqln" event={"ID":"4bf6953c-28a8-49f6-b850-d6572decd288","Type":"ContainerStarted","Data":"32551276cc07685fb00ab53d224aa362a461afe9e2d5ab109bc1b8120903da36"}
Feb 02 22:50:41 crc kubenswrapper[4755]: I0202 22:50:41.171222 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/28233917-7a5b-4379-aa43-c42633f51848-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs\" (UID: \"28233917-7a5b-4379-aa43-c42633f51848\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs"
Feb 02 22:50:41 crc kubenswrapper[4755]: E0202 22:50:41.172000 4755 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Feb 02 22:50:41 crc kubenswrapper[4755]: E0202 22:50:41.172298 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28233917-7a5b-4379-aa43-c42633f51848-cert podName:28233917-7a5b-4379-aa43-c42633f51848 nodeName:}" failed. No retries permitted until 2026-02-02 22:50:43.172278162 +0000 UTC m=+998.863498488 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/28233917-7a5b-4379-aa43-c42633f51848-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs" (UID: "28233917-7a5b-4379-aa43-c42633f51848") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Feb 02 22:50:41 crc kubenswrapper[4755]: I0202 22:50:41.476803 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-metrics-certs\") pod \"openstack-operator-controller-manager-bc8597898-njwc6\" (UID: \"9960033f-69b1-4b1c-9e06-aaf5e6d61559\") " pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6"
Feb 02 22:50:41 crc kubenswrapper[4755]: I0202 22:50:41.476915 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-webhook-certs\") pod \"openstack-operator-controller-manager-bc8597898-njwc6\" (UID: \"9960033f-69b1-4b1c-9e06-aaf5e6d61559\") " pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6"
Feb 02 22:50:41 crc kubenswrapper[4755]: E0202 22:50:41.477009 4755 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Feb 02 22:50:41 crc kubenswrapper[4755]: E0202 22:50:41.477091 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-metrics-certs podName:9960033f-69b1-4b1c-9e06-aaf5e6d61559 nodeName:}" failed. No retries permitted until 2026-02-02 22:50:43.477073996 +0000 UTC m=+999.168294312 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-metrics-certs") pod "openstack-operator-controller-manager-bc8597898-njwc6" (UID: "9960033f-69b1-4b1c-9e06-aaf5e6d61559") : secret "metrics-server-cert" not found
Feb 02 22:50:41 crc kubenswrapper[4755]: E0202 22:50:41.477381 4755 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Feb 02 22:50:41 crc kubenswrapper[4755]: E0202 22:50:41.477477 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-webhook-certs podName:9960033f-69b1-4b1c-9e06-aaf5e6d61559 nodeName:}" failed. No retries permitted until 2026-02-02 22:50:43.477452027 +0000 UTC m=+999.168672343 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-webhook-certs") pod "openstack-operator-controller-manager-bc8597898-njwc6" (UID: "9960033f-69b1-4b1c-9e06-aaf5e6d61559") : secret "webhook-server-cert" not found
Feb 02 22:50:42 crc kubenswrapper[4755]: E0202 22:50:42.150146 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:e0824d5d461ada59715eb3048ed9394c80abba09c45503f8f90ee3b34e525488\\\"\"" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-7p67q" podUID="ec4fb29e-f536-46ec-bd89-5b212f2a5d13"
Feb 02 22:50:42 crc kubenswrapper[4755]: E0202 22:50:42.150298 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:319c969e88f109b26487a9f5a67203682803d7386424703ab7ca0340be99ae17\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-rj689" podUID="62f03e5d-4b33-46dc-b74f-c5b2f19e8d7b"
Feb 02 22:50:42 crc kubenswrapper[4755]: E0202 22:50:42.150385 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-lpvlk" podUID="b0dd57f2-6b55-4cc3-827f-a5c2321b2ad5"
Feb 02 22:50:42 crc kubenswrapper[4755]: E0202 22:50:42.151744 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:3e01e99d3ca1b6c20b1bb015b00cfcbffc584f22a93dc6fe4019d63b813c0241\\\"\"" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-6xtvs" podUID="368193dc-45fb-4dff-8c24-8c38a7fd56da"
Feb 02 22:50:42 crc kubenswrapper[4755]: E0202 22:50:42.151853 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:5340b88039fac393da49ef4e181b2720c809c27a6bb30531a07a49342a1da45e\\\"\"" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-lm5vg" podUID="afe1c6d0-666e-46f4-93f9-a814399d699b"
Feb 02 22:50:42 crc kubenswrapper[4755]: E0202 22:50:42.152567 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:e6f2f361f1dcbb321407a5884951e16ff96e7b88942b10b548f27ad4de14a0be\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-dwrws" podUID="1734ceac-dd78-4d4b-986c-5a3c27c3c48f"
\\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-564965969-5prbd" podUID="04cd6a32-6398-4f89-b034-3a9ebf8da40b" Feb 02 22:50:43 crc kubenswrapper[4755]: I0202 22:50:43.007508 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-cert\") pod \"infra-operator-controller-manager-79955696d6-zh58s\" (UID: \"44ef88e4-d62d-4f16-ab3c-15b7136ac5c9\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-zh58s" Feb 02 22:50:43 crc kubenswrapper[4755]: E0202 22:50:43.007773 4755 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 02 22:50:43 crc kubenswrapper[4755]: E0202 22:50:43.007882 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-cert podName:44ef88e4-d62d-4f16-ab3c-15b7136ac5c9 nodeName:}" failed. No retries permitted until 2026-02-02 22:50:47.007854266 +0000 UTC m=+1002.699074622 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-cert") pod "infra-operator-controller-manager-79955696d6-zh58s" (UID: "44ef88e4-d62d-4f16-ab3c-15b7136ac5c9") : secret "infra-operator-webhook-server-cert" not found Feb 02 22:50:43 crc kubenswrapper[4755]: I0202 22:50:43.212932 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/28233917-7a5b-4379-aa43-c42633f51848-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs\" (UID: \"28233917-7a5b-4379-aa43-c42633f51848\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs" Feb 02 22:50:43 crc kubenswrapper[4755]: E0202 22:50:43.213150 4755 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 22:50:43 crc kubenswrapper[4755]: E0202 22:50:43.213282 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28233917-7a5b-4379-aa43-c42633f51848-cert podName:28233917-7a5b-4379-aa43-c42633f51848 nodeName:}" failed. No retries permitted until 2026-02-02 22:50:47.213255964 +0000 UTC m=+1002.904476300 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/28233917-7a5b-4379-aa43-c42633f51848-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs" (UID: "28233917-7a5b-4379-aa43-c42633f51848") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 22:50:43 crc kubenswrapper[4755]: I0202 22:50:43.517454 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-metrics-certs\") pod \"openstack-operator-controller-manager-bc8597898-njwc6\" (UID: \"9960033f-69b1-4b1c-9e06-aaf5e6d61559\") " pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6" Feb 02 22:50:43 crc kubenswrapper[4755]: E0202 22:50:43.517684 4755 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 02 22:50:43 crc kubenswrapper[4755]: E0202 22:50:43.517780 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-metrics-certs podName:9960033f-69b1-4b1c-9e06-aaf5e6d61559 nodeName:}" failed. No retries permitted until 2026-02-02 22:50:47.517754139 +0000 UTC m=+1003.208974525 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-metrics-certs") pod "openstack-operator-controller-manager-bc8597898-njwc6" (UID: "9960033f-69b1-4b1c-9e06-aaf5e6d61559") : secret "metrics-server-cert" not found Feb 02 22:50:43 crc kubenswrapper[4755]: I0202 22:50:43.518101 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-webhook-certs\") pod \"openstack-operator-controller-manager-bc8597898-njwc6\" (UID: \"9960033f-69b1-4b1c-9e06-aaf5e6d61559\") " pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6" Feb 02 22:50:43 crc kubenswrapper[4755]: E0202 22:50:43.518241 4755 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 02 22:50:43 crc kubenswrapper[4755]: E0202 22:50:43.518303 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-webhook-certs podName:9960033f-69b1-4b1c-9e06-aaf5e6d61559 nodeName:}" failed. No retries permitted until 2026-02-02 22:50:47.518289014 +0000 UTC m=+1003.209509340 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-webhook-certs") pod "openstack-operator-controller-manager-bc8597898-njwc6" (UID: "9960033f-69b1-4b1c-9e06-aaf5e6d61559") : secret "webhook-server-cert" not found Feb 02 22:50:47 crc kubenswrapper[4755]: I0202 22:50:47.086527 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-cert\") pod \"infra-operator-controller-manager-79955696d6-zh58s\" (UID: \"44ef88e4-d62d-4f16-ab3c-15b7136ac5c9\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-zh58s" Feb 02 22:50:47 crc kubenswrapper[4755]: E0202 22:50:47.086858 4755 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 02 22:50:47 crc kubenswrapper[4755]: E0202 22:50:47.087115 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-cert podName:44ef88e4-d62d-4f16-ab3c-15b7136ac5c9 nodeName:}" failed. No retries permitted until 2026-02-02 22:50:55.08709275 +0000 UTC m=+1010.778313076 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-cert") pod "infra-operator-controller-manager-79955696d6-zh58s" (UID: "44ef88e4-d62d-4f16-ab3c-15b7136ac5c9") : secret "infra-operator-webhook-server-cert" not found Feb 02 22:50:47 crc kubenswrapper[4755]: I0202 22:50:47.289290 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/28233917-7a5b-4379-aa43-c42633f51848-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs\" (UID: \"28233917-7a5b-4379-aa43-c42633f51848\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs" Feb 02 22:50:47 crc kubenswrapper[4755]: E0202 22:50:47.289469 4755 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 22:50:47 crc kubenswrapper[4755]: E0202 22:50:47.289553 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28233917-7a5b-4379-aa43-c42633f51848-cert podName:28233917-7a5b-4379-aa43-c42633f51848 nodeName:}" failed. No retries permitted until 2026-02-02 22:50:55.289529064 +0000 UTC m=+1010.980749390 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/28233917-7a5b-4379-aa43-c42633f51848-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs" (UID: "28233917-7a5b-4379-aa43-c42633f51848") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 22:50:47 crc kubenswrapper[4755]: I0202 22:50:47.592605 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-metrics-certs\") pod \"openstack-operator-controller-manager-bc8597898-njwc6\" (UID: \"9960033f-69b1-4b1c-9e06-aaf5e6d61559\") " pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6" Feb 02 22:50:47 crc kubenswrapper[4755]: I0202 22:50:47.592722 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-webhook-certs\") pod \"openstack-operator-controller-manager-bc8597898-njwc6\" (UID: \"9960033f-69b1-4b1c-9e06-aaf5e6d61559\") " pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6" Feb 02 22:50:47 crc kubenswrapper[4755]: E0202 22:50:47.592917 4755 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 02 22:50:47 crc kubenswrapper[4755]: E0202 22:50:47.592946 4755 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 02 22:50:47 crc kubenswrapper[4755]: E0202 22:50:47.592981 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-webhook-certs podName:9960033f-69b1-4b1c-9e06-aaf5e6d61559 nodeName:}" failed. No retries permitted until 2026-02-02 22:50:55.59296332 +0000 UTC m=+1011.284183646 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-webhook-certs") pod "openstack-operator-controller-manager-bc8597898-njwc6" (UID: "9960033f-69b1-4b1c-9e06-aaf5e6d61559") : secret "webhook-server-cert" not found Feb 02 22:50:47 crc kubenswrapper[4755]: E0202 22:50:47.593037 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-metrics-certs podName:9960033f-69b1-4b1c-9e06-aaf5e6d61559 nodeName:}" failed. No retries permitted until 2026-02-02 22:50:55.593009891 +0000 UTC m=+1011.284230257 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-metrics-certs") pod "openstack-operator-controller-manager-bc8597898-njwc6" (UID: "9960033f-69b1-4b1c-9e06-aaf5e6d61559") : secret "metrics-server-cert" not found Feb 02 22:50:52 crc kubenswrapper[4755]: E0202 22:50:52.208486 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.38:5001/openstack-k8s-operators/telemetry-operator:a5bcf05e2d71c610156d017fdf197f7c58570d79" Feb 02 22:50:52 crc kubenswrapper[4755]: E0202 22:50:52.209177 4755 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.38:5001/openstack-k8s-operators/telemetry-operator:a5bcf05e2d71c610156d017fdf197f7c58570d79" Feb 02 22:50:52 crc kubenswrapper[4755]: E0202 22:50:52.209403 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.38:5001/openstack-k8s-operators/telemetry-operator:a5bcf05e2d71c610156d017fdf197f7c58570d79,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6zxqd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-5b96584f66-98jt2_openstack-operators(fd656823-f4de-4e4d-a109-7a180552abd1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 22:50:52 crc kubenswrapper[4755]: E0202 22:50:52.210660 4755 
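The repeating MountVolume.SetUp failures above are the kubelet waiting for webhook and metrics certificate secrets that do not exist yet; note durationBeforeRetry doubling from 2s to 4s to 8s across the passes, which matches an exponential backoff on each per-volume operation. A small self-contained Go sketch of that schedule, with the 2s starting delay and doubling factor read off this log rather than taken from kubelet source:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Reproduce the retry spacing visible in the log: each failed
	// MountVolume attempt doubles the wait before the next attempt.
	delay := 2 * time.Second
	for attempt := 1; attempt <= 4; attempt++ {
		fmt.Printf("attempt %d: durationBeforeRetry %s\n", attempt, delay)
		delay *= 2
	}
}

Once the missing secrets appear, the next retry mounts the volume and the affected pods can proceed.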
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-5b96584f66-98jt2" podUID="fd656823-f4de-4e4d-a109-7a180552abd1" Feb 02 22:50:52 crc kubenswrapper[4755]: E0202 22:50:52.245556 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.38:5001/openstack-k8s-operators/telemetry-operator:a5bcf05e2d71c610156d017fdf197f7c58570d79\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5b96584f66-98jt2" podUID="fd656823-f4de-4e4d-a109-7a180552abd1" Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.241828 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-f8k9j" event={"ID":"67dc65c3-ffb5-4139-b405-87a180ddb551","Type":"ContainerStarted","Data":"d71e67f534259299716ba35e8cb4b75f3eee769be214608512266bb9b315d972"} Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.242164 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-f8k9j" Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.243689 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-gfmgq" event={"ID":"f8a6ce9f-fde2-4696-9302-7edb0a04d233","Type":"ContainerStarted","Data":"f0bf94828d19037ec4eda1d6e20bf7c653112b758f60313201035b3dda76aa3c"} Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.244266 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-gfmgq" Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.245639 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-644bb" event={"ID":"e9675ae3-4e81-4adb-85a0-cd21ac496df2","Type":"ContainerStarted","Data":"be13e8ec729bfe78fb7ec60f2ff02baf5e81932b0a528e16e9abb9201a86b22e"} Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.245977 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-644bb" Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.247656 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-hvd6r" event={"ID":"7cdb62bb-2e9f-43c8-a6ac-5e05577fb7bd","Type":"ContainerStarted","Data":"6dd7a2cfb906ffb7d72e5be8750a5a283fe1a8a1d3ac4c47800df2929137c408"} Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.248001 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-hvd6r" Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.249461 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-hlqln" event={"ID":"4bf6953c-28a8-49f6-b850-d6572decd288","Type":"ContainerStarted","Data":"760dc62b4ec71af1f626152356caa0d39424be32a1bbca59bfa184bad5cbfa31"} Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.249528 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-hlqln" Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.251082 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-lz7b7" event={"ID":"cc7aea9e-48ae-4d78-835e-3516d8bdd1e0","Type":"ContainerStarted","Data":"6be5b921c1351795d67155b03fef9a212f3f43469cb7bdfe13c4ef13bc4ce3bb"} Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.251657 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-lz7b7" Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.262698 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-cv8gh" event={"ID":"b5d1ece3-a9d2-4620-98f2-2bd2ff66184d","Type":"ContainerStarted","Data":"c7875a45d2265da6facebb4dfc0a65e0cc837465b5ff3cbd2729bb8186a38a96"} Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.263400 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-cv8gh" Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.264266 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-lpnrn" event={"ID":"49a76ede-9115-4a09-b344-f7e130018c83","Type":"ContainerStarted","Data":"307ad51c2af1a6e24e90dac0065cdbe0c2cbaa08e088edf7817321521c3c814c"} Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.264637 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-lpnrn" Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.275928 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-sgwm7" event={"ID":"62edcea1-a12a-428b-bec6-d5c14bcb2d9d","Type":"ContainerStarted","Data":"7e1847b1edaff731425a5a7016c68275176599350a1a3141ebad623f54adaefb"} Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.277002 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-sgwm7" Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.286494 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-4br2h" event={"ID":"06cf134b-94e8-4945-b882-bc54dd5c5045","Type":"ContainerStarted","Data":"d75df77fe48e2b920f95cd24a475df669f54d53ae79b3dc814526b4141610187"} Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.287038 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-4br2h" Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.291707 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-f8k9j" podStartSLOduration=2.660955233 podStartE2EDuration="14.29169521s" podCreationTimestamp="2026-02-02 22:50:39 +0000 UTC" firstStartedPulling="2026-02-02 22:50:40.654171131 +0000 UTC m=+996.345391457" lastFinishedPulling="2026-02-02 22:50:52.284911068 +0000 UTC m=+1007.976131434" observedRunningTime="2026-02-02 22:50:53.276517326 +0000 UTC m=+1008.967737652" watchObservedRunningTime="2026-02-02 22:50:53.29169521 +0000 UTC m=+1008.982915536" Feb 02 22:50:53 crc 
Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.301143 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-h69gk" event={"ID":"c89c93bd-725d-4cb4-9464-22674774af64","Type":"ContainerStarted","Data":"c74124f6b3c2dadbbaf79e84dae92e03081d2c02aade5652825204562725b2b2"}
Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.302589 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-h69gk"
Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.306901 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-5q9mw" event={"ID":"5073a3de-5fb1-4375-9db4-a7009d6b8799","Type":"ContainerStarted","Data":"71478c080df7dd75b7a8d075efb4380586e0fb284e7765460e4b6b8526befe19"}
Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.307489 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-5q9mw"
Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.316578 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-hvd6r" podStartSLOduration=2.559650124 podStartE2EDuration="14.316568355s" podCreationTimestamp="2026-02-02 22:50:39 +0000 UTC" firstStartedPulling="2026-02-02 22:50:40.489065079 +0000 UTC m=+996.180285405" lastFinishedPulling="2026-02-02 22:50:52.24598327 +0000 UTC m=+1007.937203636" observedRunningTime="2026-02-02 22:50:53.314691752 +0000 UTC m=+1009.005912088" watchObservedRunningTime="2026-02-02 22:50:53.316568355 +0000 UTC m=+1009.007788681"
Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.342230 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-sgwm7" podStartSLOduration=2.3980468090000002 podStartE2EDuration="14.342209411s" podCreationTimestamp="2026-02-02 22:50:39 +0000 UTC" firstStartedPulling="2026-02-02 22:50:40.28790484 +0000 UTC m=+995.979125166" lastFinishedPulling="2026-02-02 22:50:52.232067402 +0000 UTC m=+1007.923287768" observedRunningTime="2026-02-02 22:50:53.340056711 +0000 UTC m=+1009.031277057" watchObservedRunningTime="2026-02-02 22:50:53.342209411 +0000 UTC m=+1009.033429727"
Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.398063 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-lz7b7" podStartSLOduration=2.596682958 podStartE2EDuration="14.398048621s" podCreationTimestamp="2026-02-02 22:50:39 +0000 UTC" firstStartedPulling="2026-02-02 22:50:40.472194348 +0000 UTC m=+996.163414674" lastFinishedPulling="2026-02-02 22:50:52.273559991 +0000 UTC m=+1007.964780337" observedRunningTime="2026-02-02 22:50:53.377064755 +0000 UTC m=+1009.068285081" watchObservedRunningTime="2026-02-02 22:50:53.398048621 +0000 UTC m=+1009.089268947"
Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.400416 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-hlqln" podStartSLOduration=2.730307941 podStartE2EDuration="14.400411267s" podCreationTimestamp="2026-02-02 22:50:39 +0000 UTC" firstStartedPulling="2026-02-02 22:50:40.644488321 +0000 UTC m=+996.335708647" lastFinishedPulling="2026-02-02 22:50:52.314591647 +0000 UTC 
m=+1008.005811973" observedRunningTime="2026-02-02 22:50:53.396439876 +0000 UTC m=+1009.087660202" watchObservedRunningTime="2026-02-02 22:50:53.400411267 +0000 UTC m=+1009.091631593" Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.427249 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-lpnrn" podStartSLOduration=2.6618484479999998 podStartE2EDuration="14.427235026s" podCreationTimestamp="2026-02-02 22:50:39 +0000 UTC" firstStartedPulling="2026-02-02 22:50:40.490626893 +0000 UTC m=+996.181847219" lastFinishedPulling="2026-02-02 22:50:52.256013441 +0000 UTC m=+1007.947233797" observedRunningTime="2026-02-02 22:50:53.423204953 +0000 UTC m=+1009.114425279" watchObservedRunningTime="2026-02-02 22:50:53.427235026 +0000 UTC m=+1009.118455352" Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.469659 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-cv8gh" podStartSLOduration=3.112782954 podStartE2EDuration="14.469643821s" podCreationTimestamp="2026-02-02 22:50:39 +0000 UTC" firstStartedPulling="2026-02-02 22:50:40.946255809 +0000 UTC m=+996.637476135" lastFinishedPulling="2026-02-02 22:50:52.303116666 +0000 UTC m=+1007.994337002" observedRunningTime="2026-02-02 22:50:53.455009142 +0000 UTC m=+1009.146229468" watchObservedRunningTime="2026-02-02 22:50:53.469643821 +0000 UTC m=+1009.160864147" Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.470213 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-gfmgq" podStartSLOduration=2.671817436 podStartE2EDuration="14.470205296s" podCreationTimestamp="2026-02-02 22:50:39 +0000 UTC" firstStartedPulling="2026-02-02 22:50:40.491825586 +0000 UTC m=+996.183045912" lastFinishedPulling="2026-02-02 22:50:52.290213436 +0000 UTC m=+1007.981433772" observedRunningTime="2026-02-02 22:50:53.464165118 +0000 UTC m=+1009.155385444" watchObservedRunningTime="2026-02-02 22:50:53.470205296 +0000 UTC m=+1009.161425622" Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.500746 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-644bb" podStartSLOduration=2.850247591 podStartE2EDuration="14.500716709s" podCreationTimestamp="2026-02-02 22:50:39 +0000 UTC" firstStartedPulling="2026-02-02 22:50:40.640955372 +0000 UTC m=+996.332175698" lastFinishedPulling="2026-02-02 22:50:52.29142448 +0000 UTC m=+1007.982644816" observedRunningTime="2026-02-02 22:50:53.495103292 +0000 UTC m=+1009.186323618" watchObservedRunningTime="2026-02-02 22:50:53.500716709 +0000 UTC m=+1009.191937035" Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.522390 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-4br2h" podStartSLOduration=2.560752115 podStartE2EDuration="14.522373124s" podCreationTimestamp="2026-02-02 22:50:39 +0000 UTC" firstStartedPulling="2026-02-02 22:50:40.270483834 +0000 UTC m=+995.961704160" lastFinishedPulling="2026-02-02 22:50:52.232104803 +0000 UTC m=+1007.923325169" observedRunningTime="2026-02-02 22:50:53.516560121 +0000 UTC m=+1009.207780437" watchObservedRunningTime="2026-02-02 22:50:53.522373124 +0000 UTC m=+1009.213593450" Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.530483 4755 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-h69gk" podStartSLOduration=2.874332354 podStartE2EDuration="14.53047291s" podCreationTimestamp="2026-02-02 22:50:39 +0000 UTC" firstStartedPulling="2026-02-02 22:50:40.654617914 +0000 UTC m=+996.345838240" lastFinishedPulling="2026-02-02 22:50:52.31075848 +0000 UTC m=+1008.001978796" observedRunningTime="2026-02-02 22:50:53.529815511 +0000 UTC m=+1009.221035847" watchObservedRunningTime="2026-02-02 22:50:53.53047291 +0000 UTC m=+1009.221693236" Feb 02 22:50:53 crc kubenswrapper[4755]: I0202 22:50:53.557282 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-5q9mw" podStartSLOduration=3.040890865 podStartE2EDuration="14.557259428s" podCreationTimestamp="2026-02-02 22:50:39 +0000 UTC" firstStartedPulling="2026-02-02 22:50:40.764231654 +0000 UTC m=+996.455451980" lastFinishedPulling="2026-02-02 22:50:52.280600197 +0000 UTC m=+1007.971820543" observedRunningTime="2026-02-02 22:50:53.549977275 +0000 UTC m=+1009.241197601" watchObservedRunningTime="2026-02-02 22:50:53.557259428 +0000 UTC m=+1009.248479754" Feb 02 22:50:55 crc kubenswrapper[4755]: I0202 22:50:55.125720 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-cert\") pod \"infra-operator-controller-manager-79955696d6-zh58s\" (UID: \"44ef88e4-d62d-4f16-ab3c-15b7136ac5c9\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-zh58s" Feb 02 22:50:55 crc kubenswrapper[4755]: E0202 22:50:55.125913 4755 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 02 22:50:55 crc kubenswrapper[4755]: E0202 22:50:55.126078 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-cert podName:44ef88e4-d62d-4f16-ab3c-15b7136ac5c9 nodeName:}" failed. No retries permitted until 2026-02-02 22:51:11.126061978 +0000 UTC m=+1026.817282304 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-cert") pod "infra-operator-controller-manager-79955696d6-zh58s" (UID: "44ef88e4-d62d-4f16-ab3c-15b7136ac5c9") : secret "infra-operator-webhook-server-cert" not found Feb 02 22:50:55 crc kubenswrapper[4755]: I0202 22:50:55.328590 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/28233917-7a5b-4379-aa43-c42633f51848-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs\" (UID: \"28233917-7a5b-4379-aa43-c42633f51848\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs" Feb 02 22:50:55 crc kubenswrapper[4755]: E0202 22:50:55.328882 4755 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 02 22:50:55 crc kubenswrapper[4755]: E0202 22:50:55.328937 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28233917-7a5b-4379-aa43-c42633f51848-cert podName:28233917-7a5b-4379-aa43-c42633f51848 nodeName:}" failed. 
No retries permitted until 2026-02-02 22:51:11.328922234 +0000 UTC m=+1027.020142570 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/28233917-7a5b-4379-aa43-c42633f51848-cert") pod "openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs" (UID: "28233917-7a5b-4379-aa43-c42633f51848") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Feb 02 22:50:55 crc kubenswrapper[4755]: I0202 22:50:55.632527 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-metrics-certs\") pod \"openstack-operator-controller-manager-bc8597898-njwc6\" (UID: \"9960033f-69b1-4b1c-9e06-aaf5e6d61559\") " pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6"
Feb 02 22:50:55 crc kubenswrapper[4755]: I0202 22:50:55.632625 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-webhook-certs\") pod \"openstack-operator-controller-manager-bc8597898-njwc6\" (UID: \"9960033f-69b1-4b1c-9e06-aaf5e6d61559\") " pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6"
Feb 02 22:50:55 crc kubenswrapper[4755]: E0202 22:50:55.632758 4755 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Feb 02 22:50:55 crc kubenswrapper[4755]: E0202 22:50:55.632807 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-webhook-certs podName:9960033f-69b1-4b1c-9e06-aaf5e6d61559 nodeName:}" failed. No retries permitted until 2026-02-02 22:51:11.632791742 +0000 UTC m=+1027.324012068 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-webhook-certs") pod "openstack-operator-controller-manager-bc8597898-njwc6" (UID: "9960033f-69b1-4b1c-9e06-aaf5e6d61559") : secret "webhook-server-cert" not found
Feb 02 22:50:55 crc kubenswrapper[4755]: E0202 22:50:55.633182 4755 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Feb 02 22:50:55 crc kubenswrapper[4755]: E0202 22:50:55.633219 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-metrics-certs podName:9960033f-69b1-4b1c-9e06-aaf5e6d61559 nodeName:}" failed. No retries permitted until 2026-02-02 22:51:11.633211554 +0000 UTC m=+1027.324431880 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-metrics-certs") pod "openstack-operator-controller-manager-bc8597898-njwc6" (UID: "9960033f-69b1-4b1c-9e06-aaf5e6d61559") : secret "metrics-server-cert" not found
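Each missing-secret failure above is parked by nestedpendingoperations with "No retries permitted until" a deadline 16s out: the infra, baremetal, and umbrella openstack-operator controller-manager pods are all waiting on webhook/metrics cert secrets that have not been created yet (presumably their certificate provisioning has not caught up; the log itself does not say why). The 16s is consistent with an exponential backoff that doubles from 500ms and caps at roughly 2m2s, which is the scheme I would expect from kubelet's per-volume retry logic; treat those constants as assumptions. A sketch:

```go
package main

import (
	"fmt"
	"time"
)

// durationBeforeRetry doubles on each consecutive failure of the same
// volume operation and is clamped at a cap. 16s, as logged above, would
// be the sixth straight failure (0.5s, 1s, 2s, 4s, 8s, 16s).
func durationBeforeRetry(failures int) time.Duration {
	const (
		initial  = 500 * time.Millisecond        // assumed initial delay
		maxDelay = 2*time.Minute + 2*time.Second // assumed cap
	)
	d := initial
	for i := 1; i < failures; i++ {
		d *= 2
		if d >= maxDelay {
			return maxDelay
		}
	}
	return d
}

func main() {
	for n := 1; n <= 8; n++ {
		fmt.Printf("failure %d -> retry in %v\n", n, durationBeforeRetry(n))
	}
}
```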
Feb 02 22:50:59 crc kubenswrapper[4755]: I0202 22:50:59.365808 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7b6c4d8c5f-lpnrn"
Feb 02 22:50:59 crc kubenswrapper[4755]: I0202 22:50:59.394572 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-8d874c8fc-gfmgq"
Feb 02 22:50:59 crc kubenswrapper[4755]: I0202 22:50:59.404592 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-6d9697b7f4-sgwm7"
Feb 02 22:50:59 crc kubenswrapper[4755]: I0202 22:50:59.435373 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-8886f4c47-4br2h"
Feb 02 22:50:59 crc kubenswrapper[4755]: I0202 22:50:59.471851 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-69d6db494d-lz7b7"
Feb 02 22:50:59 crc kubenswrapper[4755]: I0202 22:50:59.529391 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-5fb775575f-hvd6r"
Feb 02 22:50:59 crc kubenswrapper[4755]: I0202 22:50:59.638828 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-5f4b8bd54d-hlqln"
Feb 02 22:50:59 crc kubenswrapper[4755]: I0202 22:50:59.750380 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7dd968899f-f8k9j"
Feb 02 22:50:59 crc kubenswrapper[4755]: I0202 22:50:59.783445 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-67bf948998-h69gk"
Feb 02 22:50:59 crc kubenswrapper[4755]: I0202 22:50:59.811499 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-585dbc889-644bb"
Feb 02 22:50:59 crc kubenswrapper[4755]: I0202 22:50:59.865228 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-788c46999f-5q9mw"
Feb 02 22:50:59 crc kubenswrapper[4755]: I0202 22:50:59.950306 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-68fc8c869-cv8gh"
Feb 02 22:51:00 crc kubenswrapper[4755]: I0202 22:51:00.382917 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-564965969-5prbd" event={"ID":"04cd6a32-6398-4f89-b034-3a9ebf8da40b","Type":"ContainerStarted","Data":"a37469e1dbd6120c700ba22579a407b853155c5a6adcf653962b1af227e580c3"}
Feb 02 22:51:00 crc kubenswrapper[4755]: I0202 22:51:00.384101 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-dwrws" 
event={"ID":"1734ceac-dd78-4d4b-986c-5a3c27c3c48f","Type":"ContainerStarted","Data":"a9d5b0b12ef35f5d80d2f2f45ac294d9f1cc459fdb60cde159aec4ecd12dea94"} Feb 02 22:51:00 crc kubenswrapper[4755]: I0202 22:51:00.385463 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-564965969-5prbd" Feb 02 22:51:00 crc kubenswrapper[4755]: I0202 22:51:00.385492 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-dwrws" Feb 02 22:51:00 crc kubenswrapper[4755]: I0202 22:51:00.398945 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-lm5vg" event={"ID":"afe1c6d0-666e-46f4-93f9-a814399d699b","Type":"ContainerStarted","Data":"77e45f33b15b3a8fff7cf70248359a2ca4935fba78331bd4c172d48e6b0be10b"} Feb 02 22:51:00 crc kubenswrapper[4755]: I0202 22:51:00.399520 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-lm5vg" Feb 02 22:51:00 crc kubenswrapper[4755]: I0202 22:51:00.400975 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-6xtvs" event={"ID":"368193dc-45fb-4dff-8c24-8c38a7fd56da","Type":"ContainerStarted","Data":"c63c8810b041fcc475cb81b536a6f6f3d90e8fed0fc64fb27b4d591fdb3654f4"} Feb 02 22:51:00 crc kubenswrapper[4755]: I0202 22:51:00.401311 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-6xtvs" Feb 02 22:51:00 crc kubenswrapper[4755]: I0202 22:51:00.408032 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-7p67q" event={"ID":"ec4fb29e-f536-46ec-bd89-5b212f2a5d13","Type":"ContainerStarted","Data":"247b10e4067b31e1bfe05b34c3891412b5113a1d0ae0e4e09902a29736cf2749"} Feb 02 22:51:00 crc kubenswrapper[4755]: I0202 22:51:00.408410 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-7p67q" Feb 02 22:51:00 crc kubenswrapper[4755]: I0202 22:51:00.413317 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-dwrws" podStartSLOduration=2.822568856 podStartE2EDuration="21.413309244s" podCreationTimestamp="2026-02-02 22:50:39 +0000 UTC" firstStartedPulling="2026-02-02 22:50:40.779774378 +0000 UTC m=+996.470994694" lastFinishedPulling="2026-02-02 22:50:59.370514756 +0000 UTC m=+1015.061735082" observedRunningTime="2026-02-02 22:51:00.40885083 +0000 UTC m=+1016.100071156" watchObservedRunningTime="2026-02-02 22:51:00.413309244 +0000 UTC m=+1016.104529570" Feb 02 22:51:00 crc kubenswrapper[4755]: I0202 22:51:00.418116 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-rj689" event={"ID":"62f03e5d-4b33-46dc-b74f-c5b2f19e8d7b","Type":"ContainerStarted","Data":"80fd2f991c0d5fec22ec9c785eb7d307b961e48b2a6036ab8d3a6560f6bfb157"} Feb 02 22:51:00 crc kubenswrapper[4755]: I0202 22:51:00.418271 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-rj689" Feb 02 22:51:00 crc kubenswrapper[4755]: I0202 22:51:00.427812 4755 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-6xtvs" podStartSLOduration=3.057200951 podStartE2EDuration="21.427788399s" podCreationTimestamp="2026-02-02 22:50:39 +0000 UTC" firstStartedPulling="2026-02-02 22:50:40.95739764 +0000 UTC m=+996.648617966" lastFinishedPulling="2026-02-02 22:50:59.327985088 +0000 UTC m=+1015.019205414" observedRunningTime="2026-02-02 22:51:00.427670566 +0000 UTC m=+1016.118890892" watchObservedRunningTime="2026-02-02 22:51:00.427788399 +0000 UTC m=+1016.119008725" Feb 02 22:51:00 crc kubenswrapper[4755]: I0202 22:51:00.445971 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-564965969-5prbd" podStartSLOduration=3.121188688 podStartE2EDuration="21.445951606s" podCreationTimestamp="2026-02-02 22:50:39 +0000 UTC" firstStartedPulling="2026-02-02 22:50:40.95703666 +0000 UTC m=+996.648256986" lastFinishedPulling="2026-02-02 22:50:59.281799578 +0000 UTC m=+1014.973019904" observedRunningTime="2026-02-02 22:51:00.440296128 +0000 UTC m=+1016.131516454" watchObservedRunningTime="2026-02-02 22:51:00.445951606 +0000 UTC m=+1016.137171932" Feb 02 22:51:00 crc kubenswrapper[4755]: I0202 22:51:00.457426 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-7p67q" podStartSLOduration=2.904624209 podStartE2EDuration="21.457407896s" podCreationTimestamp="2026-02-02 22:50:39 +0000 UTC" firstStartedPulling="2026-02-02 22:50:40.816979088 +0000 UTC m=+996.508199414" lastFinishedPulling="2026-02-02 22:50:59.369762775 +0000 UTC m=+1015.060983101" observedRunningTime="2026-02-02 22:51:00.45326355 +0000 UTC m=+1016.144483876" watchObservedRunningTime="2026-02-02 22:51:00.457407896 +0000 UTC m=+1016.148628222" Feb 02 22:51:00 crc kubenswrapper[4755]: I0202 22:51:00.468006 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-lm5vg" podStartSLOduration=2.9497525700000002 podStartE2EDuration="21.467986762s" podCreationTimestamp="2026-02-02 22:50:39 +0000 UTC" firstStartedPulling="2026-02-02 22:50:40.809820268 +0000 UTC m=+996.501040594" lastFinishedPulling="2026-02-02 22:50:59.32805446 +0000 UTC m=+1015.019274786" observedRunningTime="2026-02-02 22:51:00.467155969 +0000 UTC m=+1016.158376295" watchObservedRunningTime="2026-02-02 22:51:00.467986762 +0000 UTC m=+1016.159207088" Feb 02 22:51:02 crc kubenswrapper[4755]: I0202 22:51:02.446047 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-lpvlk" event={"ID":"b0dd57f2-6b55-4cc3-827f-a5c2321b2ad5","Type":"ContainerStarted","Data":"855921f891b786ae79f6c8af20970f85d1a172d5f75e8265061f51389f091a64"} Feb 02 22:51:02 crc kubenswrapper[4755]: I0202 22:51:02.471853 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-lpvlk" podStartSLOduration=2.344343608 podStartE2EDuration="23.471833614s" podCreationTimestamp="2026-02-02 22:50:39 +0000 UTC" firstStartedPulling="2026-02-02 22:50:40.949187081 +0000 UTC m=+996.640407407" lastFinishedPulling="2026-02-02 22:51:02.076677087 +0000 UTC m=+1017.767897413" observedRunningTime="2026-02-02 22:51:02.471116784 +0000 UTC m=+1018.162337120" watchObservedRunningTime="2026-02-02 22:51:02.471833614 +0000 
UTC m=+1018.163053950" Feb 02 22:51:02 crc kubenswrapper[4755]: I0202 22:51:02.472669 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-rj689" podStartSLOduration=4.914002805 podStartE2EDuration="23.472661067s" podCreationTimestamp="2026-02-02 22:50:39 +0000 UTC" firstStartedPulling="2026-02-02 22:50:40.779007657 +0000 UTC m=+996.470227983" lastFinishedPulling="2026-02-02 22:50:59.337665919 +0000 UTC m=+1015.028886245" observedRunningTime="2026-02-02 22:51:00.48652192 +0000 UTC m=+1016.177742236" watchObservedRunningTime="2026-02-02 22:51:02.472661067 +0000 UTC m=+1018.163881403" Feb 02 22:51:04 crc kubenswrapper[4755]: I0202 22:51:04.467023 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5b96584f66-98jt2" event={"ID":"fd656823-f4de-4e4d-a109-7a180552abd1","Type":"ContainerStarted","Data":"7cfbb2426cc21b5c68f8652933b18eaf726cd73f415d207ec8e1d803d0cfa8b4"} Feb 02 22:51:04 crc kubenswrapper[4755]: I0202 22:51:04.467526 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-5b96584f66-98jt2" Feb 02 22:51:04 crc kubenswrapper[4755]: I0202 22:51:04.488061 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-5b96584f66-98jt2" podStartSLOduration=2.8385878350000002 podStartE2EDuration="25.488036702s" podCreationTimestamp="2026-02-02 22:50:39 +0000 UTC" firstStartedPulling="2026-02-02 22:50:40.770969693 +0000 UTC m=+996.462190019" lastFinishedPulling="2026-02-02 22:51:03.42041852 +0000 UTC m=+1019.111638886" observedRunningTime="2026-02-02 22:51:04.48725209 +0000 UTC m=+1020.178472426" watchObservedRunningTime="2026-02-02 22:51:04.488036702 +0000 UTC m=+1020.179257048" Feb 02 22:51:09 crc kubenswrapper[4755]: I0202 22:51:09.719344 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-84f48565d4-rj689" Feb 02 22:51:09 crc kubenswrapper[4755]: I0202 22:51:09.836682 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-55bff696bd-lm5vg" Feb 02 22:51:09 crc kubenswrapper[4755]: I0202 22:51:09.846815 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-6687f8d877-dwrws" Feb 02 22:51:09 crc kubenswrapper[4755]: I0202 22:51:09.929623 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-5b964cf4cd-7p67q" Feb 02 22:51:09 crc kubenswrapper[4755]: I0202 22:51:09.967704 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-5b96584f66-98jt2" Feb 02 22:51:10 crc kubenswrapper[4755]: I0202 22:51:10.168675 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-56f8bfcd9f-6xtvs" Feb 02 22:51:10 crc kubenswrapper[4755]: I0202 22:51:10.184795 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-564965969-5prbd" Feb 02 22:51:11 crc kubenswrapper[4755]: I0202 22:51:11.211258 4755 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-cert\") pod \"infra-operator-controller-manager-79955696d6-zh58s\" (UID: \"44ef88e4-d62d-4f16-ab3c-15b7136ac5c9\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-zh58s"
Feb 02 22:51:11 crc kubenswrapper[4755]: I0202 22:51:11.220551 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/44ef88e4-d62d-4f16-ab3c-15b7136ac5c9-cert\") pod \"infra-operator-controller-manager-79955696d6-zh58s\" (UID: \"44ef88e4-d62d-4f16-ab3c-15b7136ac5c9\") " pod="openstack-operators/infra-operator-controller-manager-79955696d6-zh58s"
Feb 02 22:51:11 crc kubenswrapper[4755]: I0202 22:51:11.414912 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/28233917-7a5b-4379-aa43-c42633f51848-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs\" (UID: \"28233917-7a5b-4379-aa43-c42633f51848\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs"
Feb 02 22:51:11 crc kubenswrapper[4755]: I0202 22:51:11.421312 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/28233917-7a5b-4379-aa43-c42633f51848-cert\") pod \"openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs\" (UID: \"28233917-7a5b-4379-aa43-c42633f51848\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs"
Feb 02 22:51:11 crc kubenswrapper[4755]: I0202 22:51:11.422408 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-88dqm"
Feb 02 22:51:11 crc kubenswrapper[4755]: I0202 22:51:11.429961 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-79955696d6-zh58s"
Feb 02 22:51:11 crc kubenswrapper[4755]: I0202 22:51:11.697189 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-9sk9v"
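Sixteen seconds later the parked operations retry and both cert mounts succeed, so the secrets were created somewhere in the 22:50:55–22:51:11 window. When debugging this kind of stall it can help to watch for the secret directly instead of waiting on kubelet's retry clock; a small client-go sketch, where the kubeconfig path is assumed and the secret name is the one the infra-operator pod above was blocked on:

```go
package main

import (
	"context"
	"fmt"
	"time"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumed kubeconfig location; adjust for your environment.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/root/.kube/config")
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	for {
		_, err := cs.CoreV1().Secrets("openstack-operators").
			Get(context.TODO(), "infra-operator-webhook-server-cert", metav1.GetOptions{})
		if err == nil {
			fmt.Println("secret present; kubelet's next mount retry should succeed")
			return
		}
		if !apierrors.IsNotFound(err) {
			panic(err) // anything other than not-found is a real problem
		}
		time.Sleep(2 * time.Second)
	}
}
```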
Feb 02 22:51:11 crc kubenswrapper[4755]: I0202 22:51:11.705233 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs"
Feb 02 22:51:11 crc kubenswrapper[4755]: I0202 22:51:11.719978 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-metrics-certs\") pod \"openstack-operator-controller-manager-bc8597898-njwc6\" (UID: \"9960033f-69b1-4b1c-9e06-aaf5e6d61559\") " pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6"
Feb 02 22:51:11 crc kubenswrapper[4755]: I0202 22:51:11.720095 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-webhook-certs\") pod \"openstack-operator-controller-manager-bc8597898-njwc6\" (UID: \"9960033f-69b1-4b1c-9e06-aaf5e6d61559\") " pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6"
Feb 02 22:51:11 crc kubenswrapper[4755]: I0202 22:51:11.726914 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-webhook-certs\") pod \"openstack-operator-controller-manager-bc8597898-njwc6\" (UID: \"9960033f-69b1-4b1c-9e06-aaf5e6d61559\") " pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6"
Feb 02 22:51:11 crc kubenswrapper[4755]: I0202 22:51:11.728892 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9960033f-69b1-4b1c-9e06-aaf5e6d61559-metrics-certs\") pod \"openstack-operator-controller-manager-bc8597898-njwc6\" (UID: \"9960033f-69b1-4b1c-9e06-aaf5e6d61559\") " pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6"
Feb 02 22:51:11 crc kubenswrapper[4755]: I0202 22:51:11.742640 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-79955696d6-zh58s"]
Feb 02 22:51:11 crc kubenswrapper[4755]: I0202 22:51:11.984487 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs"]
Feb 02 22:51:12 crc kubenswrapper[4755]: I0202 22:51:12.009034 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-5qg9p"
Feb 02 22:51:12 crc kubenswrapper[4755]: I0202 22:51:12.016818 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6"
Feb 02 22:51:12 crc kubenswrapper[4755]: I0202 22:51:12.508653 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6"]
Feb 02 22:51:12 crc kubenswrapper[4755]: W0202 22:51:12.514641 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9960033f_69b1_4b1c_9e06_aaf5e6d61559.slice/crio-5b8866473f30060055dbd39db016d7fe1dad257b82401a55d5cf4acafe231d42 WatchSource:0}: Error finding container 5b8866473f30060055dbd39db016d7fe1dad257b82401a55d5cf4acafe231d42: Status 404 returned error can't find the container with id 5b8866473f30060055dbd39db016d7fe1dad257b82401a55d5cf4acafe231d42
Feb 02 22:51:12 crc kubenswrapper[4755]: I0202 22:51:12.545042 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs" event={"ID":"28233917-7a5b-4379-aa43-c42633f51848","Type":"ContainerStarted","Data":"97411711320c58449cbe02ecb12deeeef8935659037680a70b83477676fcfb0b"}
Feb 02 22:51:12 crc kubenswrapper[4755]: I0202 22:51:12.546408 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6" event={"ID":"9960033f-69b1-4b1c-9e06-aaf5e6d61559","Type":"ContainerStarted","Data":"5b8866473f30060055dbd39db016d7fe1dad257b82401a55d5cf4acafe231d42"}
Feb 02 22:51:12 crc kubenswrapper[4755]: I0202 22:51:12.549781 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-79955696d6-zh58s" event={"ID":"44ef88e4-d62d-4f16-ab3c-15b7136ac5c9","Type":"ContainerStarted","Data":"de99e77f8bff01e7bc8710110254679ddaba208fe04deac1484b577927130ad7"}
Feb 02 22:51:16 crc kubenswrapper[4755]: I0202 22:51:16.592130 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6" event={"ID":"9960033f-69b1-4b1c-9e06-aaf5e6d61559","Type":"ContainerStarted","Data":"b09cbb7fb8d5500ac0ee0c155dc4f2490a1f8ff41aee4c57cc288371e62c4861"}
Feb 02 22:51:16 crc kubenswrapper[4755]: I0202 22:51:16.592916 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6"
Feb 02 22:51:16 crc kubenswrapper[4755]: I0202 22:51:16.645030 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6" podStartSLOduration=37.644993176 podStartE2EDuration="37.644993176s" podCreationTimestamp="2026-02-02 22:50:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:51:16.638049872 +0000 UTC m=+1032.329270268" watchObservedRunningTime="2026-02-02 22:51:16.644993176 +0000 UTC m=+1032.336213532"
Feb 02 22:51:19 crc kubenswrapper[4755]: I0202 22:51:19.619381 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs" event={"ID":"28233917-7a5b-4379-aa43-c42633f51848","Type":"ContainerStarted","Data":"3bdd2656b93f1c8b8e740752fe69ef4efba79789ac84acbdbb8b28c8d52c5ad3"}
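Two details in the openstack-operator latency entry above are worth unpacking. First, firstStartedPulling and lastFinishedPulling are the zero time.Time (0001-01-01), meaning no pull interval was recorded, so podStartSLOduration equals the full E2E duration of 37.644993176s. Second, the m=+1032.329… suffixes on every timestamp in this log are Go's monotonic-clock reading, evidently measured from kubelet start, which is why the offsets climb in step with the wall clock. Both behaviours fall straight out of Go's time package:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// The zero time.Time prints as 0001-01-01 00:00:00 +0000 UTC,
	// exactly the "never pulled" value in the latency entry above.
	var never time.Time
	fmt.Println(never, never.IsZero())

	// time.Now() carries a monotonic reading; printing it shows the
	// same "m=+..." suffix seen on the kubelet timestamps.
	fmt.Println(time.Now())

	start := time.Now()
	time.Sleep(10 * time.Millisecond)
	fmt.Println(time.Since(start)) // measured on the monotonic clock
}
```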
status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs" Feb 02 22:51:19 crc kubenswrapper[4755]: I0202 22:51:19.621960 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-79955696d6-zh58s" event={"ID":"44ef88e4-d62d-4f16-ab3c-15b7136ac5c9","Type":"ContainerStarted","Data":"52e534f77b99babf531c7622056b3ae9432a1692b552b7255dfab125002488b2"} Feb 02 22:51:19 crc kubenswrapper[4755]: I0202 22:51:19.622340 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-79955696d6-zh58s" Feb 02 22:51:19 crc kubenswrapper[4755]: I0202 22:51:19.664462 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs" podStartSLOduration=34.203089096 podStartE2EDuration="40.664431798s" podCreationTimestamp="2026-02-02 22:50:39 +0000 UTC" firstStartedPulling="2026-02-02 22:51:11.99676555 +0000 UTC m=+1027.687985876" lastFinishedPulling="2026-02-02 22:51:18.458108252 +0000 UTC m=+1034.149328578" observedRunningTime="2026-02-02 22:51:19.660550379 +0000 UTC m=+1035.351770775" watchObservedRunningTime="2026-02-02 22:51:19.664431798 +0000 UTC m=+1035.355652154" Feb 02 22:51:19 crc kubenswrapper[4755]: I0202 22:51:19.701611 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-79955696d6-zh58s" podStartSLOduration=33.997787311 podStartE2EDuration="40.701573055s" podCreationTimestamp="2026-02-02 22:50:39 +0000 UTC" firstStartedPulling="2026-02-02 22:51:11.73937267 +0000 UTC m=+1027.430593036" lastFinishedPulling="2026-02-02 22:51:18.443158454 +0000 UTC m=+1034.134378780" observedRunningTime="2026-02-02 22:51:19.690582348 +0000 UTC m=+1035.381802714" watchObservedRunningTime="2026-02-02 22:51:19.701573055 +0000 UTC m=+1035.392793391" Feb 02 22:51:22 crc kubenswrapper[4755]: I0202 22:51:22.025044 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-bc8597898-njwc6" Feb 02 22:51:31 crc kubenswrapper[4755]: I0202 22:51:31.439493 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-79955696d6-zh58s" Feb 02 22:51:31 crc kubenswrapper[4755]: I0202 22:51:31.717327 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs" Feb 02 22:51:52 crc kubenswrapper[4755]: I0202 22:51:52.976901 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-jwx9f"] Feb 02 22:51:52 crc kubenswrapper[4755]: I0202 22:51:52.978377 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-jwx9f" Feb 02 22:51:52 crc kubenswrapper[4755]: I0202 22:51:52.983154 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Feb 02 22:51:52 crc kubenswrapper[4755]: I0202 22:51:52.983457 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-7vz4t" Feb 02 22:51:52 crc kubenswrapper[4755]: I0202 22:51:52.983597 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Feb 02 22:51:52 crc kubenswrapper[4755]: I0202 22:51:52.999376 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.016784 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-jwx9f"] Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.066667 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g54gg\" (UniqueName: \"kubernetes.io/projected/5c336de3-751a-4836-a2a3-102de5e0fd11-kube-api-access-g54gg\") pod \"dnsmasq-dns-675f4bcbfc-jwx9f\" (UID: \"5c336de3-751a-4836-a2a3-102de5e0fd11\") " pod="openstack/dnsmasq-dns-675f4bcbfc-jwx9f" Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.066771 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c336de3-751a-4836-a2a3-102de5e0fd11-config\") pod \"dnsmasq-dns-675f4bcbfc-jwx9f\" (UID: \"5c336de3-751a-4836-a2a3-102de5e0fd11\") " pod="openstack/dnsmasq-dns-675f4bcbfc-jwx9f" Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.083264 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-5bhdq"] Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.084380 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-5bhdq" Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.086192 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.087310 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-5bhdq"] Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.168510 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c336de3-751a-4836-a2a3-102de5e0fd11-config\") pod \"dnsmasq-dns-675f4bcbfc-jwx9f\" (UID: \"5c336de3-751a-4836-a2a3-102de5e0fd11\") " pod="openstack/dnsmasq-dns-675f4bcbfc-jwx9f" Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.168563 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed-config\") pod \"dnsmasq-dns-78dd6ddcc-5bhdq\" (UID: \"2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5bhdq" Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.168581 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-5bhdq\" (UID: \"2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5bhdq" Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.168650 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g54gg\" (UniqueName: \"kubernetes.io/projected/5c336de3-751a-4836-a2a3-102de5e0fd11-kube-api-access-g54gg\") pod \"dnsmasq-dns-675f4bcbfc-jwx9f\" (UID: \"5c336de3-751a-4836-a2a3-102de5e0fd11\") " pod="openstack/dnsmasq-dns-675f4bcbfc-jwx9f" Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.168686 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gl959\" (UniqueName: \"kubernetes.io/projected/2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed-kube-api-access-gl959\") pod \"dnsmasq-dns-78dd6ddcc-5bhdq\" (UID: \"2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5bhdq" Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.169482 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c336de3-751a-4836-a2a3-102de5e0fd11-config\") pod \"dnsmasq-dns-675f4bcbfc-jwx9f\" (UID: \"5c336de3-751a-4836-a2a3-102de5e0fd11\") " pod="openstack/dnsmasq-dns-675f4bcbfc-jwx9f" Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.189134 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g54gg\" (UniqueName: \"kubernetes.io/projected/5c336de3-751a-4836-a2a3-102de5e0fd11-kube-api-access-g54gg\") pod \"dnsmasq-dns-675f4bcbfc-jwx9f\" (UID: \"5c336de3-751a-4836-a2a3-102de5e0fd11\") " pod="openstack/dnsmasq-dns-675f4bcbfc-jwx9f" Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.269530 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gl959\" (UniqueName: \"kubernetes.io/projected/2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed-kube-api-access-gl959\") pod \"dnsmasq-dns-78dd6ddcc-5bhdq\" (UID: \"2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5bhdq" Feb 02 22:51:53 crc 
kubenswrapper[4755]: I0202 22:51:53.269806 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed-config\") pod \"dnsmasq-dns-78dd6ddcc-5bhdq\" (UID: \"2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5bhdq"
Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.269835 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-5bhdq\" (UID: \"2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5bhdq"
Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.270619 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed-config\") pod \"dnsmasq-dns-78dd6ddcc-5bhdq\" (UID: \"2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5bhdq"
Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.270717 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-5bhdq\" (UID: \"2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5bhdq"
Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.287020 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gl959\" (UniqueName: \"kubernetes.io/projected/2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed-kube-api-access-gl959\") pod \"dnsmasq-dns-78dd6ddcc-5bhdq\" (UID: \"2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5bhdq"
Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.320125 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-jwx9f"
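The dnsmasq-dns pods in this stretch churn through several ReplicaSet pod-template hashes (675f4bcbfc, 78dd6ddcc, 666b6646f7, 57d769cc4f) as the Deployment's template is rewritten, which produces the bursts of SyncLoop ADD/UPDATE/DELETE that follow. When tracing that kind of churn it is handy to reduce each entry to (severity, time, pod); a regexp sketch over the klog format used throughout this log:

```go
package main

import (
	"fmt"
	"regexp"
)

// Matches the klog header emitted by kubenswrapper and the pod="..."
// field that most entries in this log carry.
var entryRe = regexp.MustCompile(`kubenswrapper\[\d+\]: ([IWE])(\d{4} \d{2}:\d{2}:\d{2}\.\d+).*?pod="([^"]+)"`)

func main() {
	// Sample line copied from this log.
	line := `Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.320125 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-jwx9f"`
	if m := entryRe.FindStringSubmatch(line); m != nil {
		fmt.Printf("severity=%s time=%s pod=%s\n", m[1], m[2], m[3])
	}
}
```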
Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.401918 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-5bhdq"
Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.766246 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-jwx9f"]
Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.882216 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-5bhdq"]
Feb 02 22:51:53 crc kubenswrapper[4755]: W0202 22:51:53.895561 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2e5c4d02_0fb9_48fc_b9fa_afb8a6c031ed.slice/crio-d17397c1c86696e208f1ea24ae22325b7157a201aab9e7956154c48727695046 WatchSource:0}: Error finding container d17397c1c86696e208f1ea24ae22325b7157a201aab9e7956154c48727695046: Status 404 returned error can't find the container with id d17397c1c86696e208f1ea24ae22325b7157a201aab9e7956154c48727695046
Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.920274 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-jwx9f" event={"ID":"5c336de3-751a-4836-a2a3-102de5e0fd11","Type":"ContainerStarted","Data":"11d2d192ddcf9842d346e4c97b7289278d51ff97fef5c66d5b68a2e2d1d3314f"}
Feb 02 22:51:53 crc kubenswrapper[4755]: I0202 22:51:53.921620 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-5bhdq" event={"ID":"2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed","Type":"ContainerStarted","Data":"d17397c1c86696e208f1ea24ae22325b7157a201aab9e7956154c48727695046"}
Feb 02 22:51:55 crc kubenswrapper[4755]: I0202 22:51:55.693789 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-jwx9f"]
Feb 02 22:51:55 crc kubenswrapper[4755]: I0202 22:51:55.722232 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-ntn7z"]
Feb 02 22:51:55 crc kubenswrapper[4755]: I0202 22:51:55.724021 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-ntn7z" Feb 02 22:51:55 crc kubenswrapper[4755]: I0202 22:51:55.732756 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-ntn7z"] Feb 02 22:51:55 crc kubenswrapper[4755]: I0202 22:51:55.804448 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ceb03ee7-fe5e-46b0-8360-9e73297c5c05-dns-svc\") pod \"dnsmasq-dns-666b6646f7-ntn7z\" (UID: \"ceb03ee7-fe5e-46b0-8360-9e73297c5c05\") " pod="openstack/dnsmasq-dns-666b6646f7-ntn7z" Feb 02 22:51:55 crc kubenswrapper[4755]: I0202 22:51:55.804517 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ceb03ee7-fe5e-46b0-8360-9e73297c5c05-config\") pod \"dnsmasq-dns-666b6646f7-ntn7z\" (UID: \"ceb03ee7-fe5e-46b0-8360-9e73297c5c05\") " pod="openstack/dnsmasq-dns-666b6646f7-ntn7z" Feb 02 22:51:55 crc kubenswrapper[4755]: I0202 22:51:55.804542 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hf6kf\" (UniqueName: \"kubernetes.io/projected/ceb03ee7-fe5e-46b0-8360-9e73297c5c05-kube-api-access-hf6kf\") pod \"dnsmasq-dns-666b6646f7-ntn7z\" (UID: \"ceb03ee7-fe5e-46b0-8360-9e73297c5c05\") " pod="openstack/dnsmasq-dns-666b6646f7-ntn7z" Feb 02 22:51:55 crc kubenswrapper[4755]: I0202 22:51:55.907100 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ceb03ee7-fe5e-46b0-8360-9e73297c5c05-config\") pod \"dnsmasq-dns-666b6646f7-ntn7z\" (UID: \"ceb03ee7-fe5e-46b0-8360-9e73297c5c05\") " pod="openstack/dnsmasq-dns-666b6646f7-ntn7z" Feb 02 22:51:55 crc kubenswrapper[4755]: I0202 22:51:55.907148 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hf6kf\" (UniqueName: \"kubernetes.io/projected/ceb03ee7-fe5e-46b0-8360-9e73297c5c05-kube-api-access-hf6kf\") pod \"dnsmasq-dns-666b6646f7-ntn7z\" (UID: \"ceb03ee7-fe5e-46b0-8360-9e73297c5c05\") " pod="openstack/dnsmasq-dns-666b6646f7-ntn7z" Feb 02 22:51:55 crc kubenswrapper[4755]: I0202 22:51:55.907228 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ceb03ee7-fe5e-46b0-8360-9e73297c5c05-dns-svc\") pod \"dnsmasq-dns-666b6646f7-ntn7z\" (UID: \"ceb03ee7-fe5e-46b0-8360-9e73297c5c05\") " pod="openstack/dnsmasq-dns-666b6646f7-ntn7z" Feb 02 22:51:55 crc kubenswrapper[4755]: I0202 22:51:55.908058 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ceb03ee7-fe5e-46b0-8360-9e73297c5c05-dns-svc\") pod \"dnsmasq-dns-666b6646f7-ntn7z\" (UID: \"ceb03ee7-fe5e-46b0-8360-9e73297c5c05\") " pod="openstack/dnsmasq-dns-666b6646f7-ntn7z" Feb 02 22:51:55 crc kubenswrapper[4755]: I0202 22:51:55.908123 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ceb03ee7-fe5e-46b0-8360-9e73297c5c05-config\") pod \"dnsmasq-dns-666b6646f7-ntn7z\" (UID: \"ceb03ee7-fe5e-46b0-8360-9e73297c5c05\") " pod="openstack/dnsmasq-dns-666b6646f7-ntn7z" Feb 02 22:51:55 crc kubenswrapper[4755]: I0202 22:51:55.963400 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hf6kf\" (UniqueName: 
\"kubernetes.io/projected/ceb03ee7-fe5e-46b0-8360-9e73297c5c05-kube-api-access-hf6kf\") pod \"dnsmasq-dns-666b6646f7-ntn7z\" (UID: \"ceb03ee7-fe5e-46b0-8360-9e73297c5c05\") " pod="openstack/dnsmasq-dns-666b6646f7-ntn7z" Feb 02 22:51:55 crc kubenswrapper[4755]: I0202 22:51:55.993377 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-5bhdq"] Feb 02 22:51:55 crc kubenswrapper[4755]: I0202 22:51:55.997274 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-wdgnn"] Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.005602 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-wdgnn" Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.041965 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-wdgnn"] Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.051127 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-ntn7z" Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.130467 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559-config\") pod \"dnsmasq-dns-57d769cc4f-wdgnn\" (UID: \"eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559\") " pod="openstack/dnsmasq-dns-57d769cc4f-wdgnn" Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.130784 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rkds\" (UniqueName: \"kubernetes.io/projected/eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559-kube-api-access-2rkds\") pod \"dnsmasq-dns-57d769cc4f-wdgnn\" (UID: \"eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559\") " pod="openstack/dnsmasq-dns-57d769cc4f-wdgnn" Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.130804 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-wdgnn\" (UID: \"eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559\") " pod="openstack/dnsmasq-dns-57d769cc4f-wdgnn" Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.231761 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rkds\" (UniqueName: \"kubernetes.io/projected/eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559-kube-api-access-2rkds\") pod \"dnsmasq-dns-57d769cc4f-wdgnn\" (UID: \"eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559\") " pod="openstack/dnsmasq-dns-57d769cc4f-wdgnn" Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.231802 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-wdgnn\" (UID: \"eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559\") " pod="openstack/dnsmasq-dns-57d769cc4f-wdgnn" Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.231842 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559-config\") pod \"dnsmasq-dns-57d769cc4f-wdgnn\" (UID: \"eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559\") " pod="openstack/dnsmasq-dns-57d769cc4f-wdgnn" Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.232665 4755 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-wdgnn\" (UID: \"eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559\") " pod="openstack/dnsmasq-dns-57d769cc4f-wdgnn" Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.232707 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559-config\") pod \"dnsmasq-dns-57d769cc4f-wdgnn\" (UID: \"eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559\") " pod="openstack/dnsmasq-dns-57d769cc4f-wdgnn" Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.253999 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rkds\" (UniqueName: \"kubernetes.io/projected/eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559-kube-api-access-2rkds\") pod \"dnsmasq-dns-57d769cc4f-wdgnn\" (UID: \"eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559\") " pod="openstack/dnsmasq-dns-57d769cc4f-wdgnn" Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.337940 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-wdgnn" Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.550092 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-ntn7z"] Feb 02 22:51:56 crc kubenswrapper[4755]: W0202 22:51:56.563658 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podceb03ee7_fe5e_46b0_8360_9e73297c5c05.slice/crio-1794674d256c05e879346dc3fdc646dd0fcec7719e65d53f91cc74a85ee145db WatchSource:0}: Error finding container 1794674d256c05e879346dc3fdc646dd0fcec7719e65d53f91cc74a85ee145db: Status 404 returned error can't find the container with id 1794674d256c05e879346dc3fdc646dd0fcec7719e65d53f91cc74a85ee145db Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.803785 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-wdgnn"] Feb 02 22:51:56 crc kubenswrapper[4755]: W0202 22:51:56.815904 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb8e08d8_9a36_46df_8a9d_5ca2ea1a7559.slice/crio-39688be6d753f7ac03abeff7feee541e52d8daa80aaed442ff301644821da5e8 WatchSource:0}: Error finding container 39688be6d753f7ac03abeff7feee541e52d8daa80aaed442ff301644821da5e8: Status 404 returned error can't find the container with id 39688be6d753f7ac03abeff7feee541e52d8daa80aaed442ff301644821da5e8 Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.873120 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.874473 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.882220 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.882317 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.882223 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.883091 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.884275 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.884321 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.884471 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-jlfgv" Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.885558 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.954605 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-wdgnn" event={"ID":"eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559","Type":"ContainerStarted","Data":"39688be6d753f7ac03abeff7feee541e52d8daa80aaed442ff301644821da5e8"} Feb 02 22:51:56 crc kubenswrapper[4755]: I0202 22:51:56.956056 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-ntn7z" event={"ID":"ceb03ee7-fe5e-46b0-8360-9e73297c5c05","Type":"ContainerStarted","Data":"1794674d256c05e879346dc3fdc646dd0fcec7719e65d53f91cc74a85ee145db"} Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.042527 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.042611 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/58b4faf6-d651-4094-b0bd-857e9074d9a9-config-data\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.042635 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/58b4faf6-d651-4094-b0bd-857e9074d9a9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.042659 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/58b4faf6-d651-4094-b0bd-857e9074d9a9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " 
pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.042676 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.042700 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.042738 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwprw\" (UniqueName: \"kubernetes.io/projected/58b4faf6-d651-4094-b0bd-857e9074d9a9-kube-api-access-dwprw\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.042768 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.042787 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/58b4faf6-d651-4094-b0bd-857e9074d9a9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.042807 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/58b4faf6-d651-4094-b0bd-857e9074d9a9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.042826 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.144118 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/58b4faf6-d651-4094-b0bd-857e9074d9a9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.144180 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 
22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.144235 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.144342 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/58b4faf6-d651-4094-b0bd-857e9074d9a9-config-data\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.144527 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/58b4faf6-d651-4094-b0bd-857e9074d9a9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.145106 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/58b4faf6-d651-4094-b0bd-857e9074d9a9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.145284 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/58b4faf6-d651-4094-b0bd-857e9074d9a9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.148103 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.148160 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.148205 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwprw\" (UniqueName: \"kubernetes.io/projected/58b4faf6-d651-4094-b0bd-857e9074d9a9-kube-api-access-dwprw\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.146787 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/58b4faf6-d651-4094-b0bd-857e9074d9a9-config-data\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.148307 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.148418 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/58b4faf6-d651-4094-b0bd-857e9074d9a9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.145543 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.152412 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/58b4faf6-d651-4094-b0bd-857e9074d9a9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.156398 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.157158 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/58b4faf6-d651-4094-b0bd-857e9074d9a9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.157812 4755 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.158353 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/dc3c1b15045b3f4e3ad98c980816f9ce7e3f9051073beec991ed4f7eea0a77f6/globalmount\"" pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.164648 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.173547 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.175134 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/58b4faf6-d651-4094-b0bd-857e9074d9a9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.181155 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwprw\" (UniqueName: \"kubernetes.io/projected/58b4faf6-d651-4094-b0bd-857e9074d9a9-kube-api-access-dwprw\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.181408 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.184220 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.186607 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.186886 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.187014 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-mr7lj" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.187118 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.187274 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.187371 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.189162 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.191140 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.226686 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998\") pod \"rabbitmq-server-0\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.359634 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbvf8\" (UniqueName: \"kubernetes.io/projected/358a528d-56dd-4737-af2c-750423bbdc56-kube-api-access-xbvf8\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.359687 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.359721 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/358a528d-56dd-4737-af2c-750423bbdc56-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.359755 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/358a528d-56dd-4737-af2c-750423bbdc56-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.359784 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/358a528d-56dd-4737-af2c-750423bbdc56-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.359811 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.359834 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.359850 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.359867 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.359891 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/358a528d-56dd-4737-af2c-750423bbdc56-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.359934 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/358a528d-56dd-4737-af2c-750423bbdc56-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.461031 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbvf8\" (UniqueName: \"kubernetes.io/projected/358a528d-56dd-4737-af2c-750423bbdc56-kube-api-access-xbvf8\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.461092 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc 
kubenswrapper[4755]: I0202 22:51:57.461126 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/358a528d-56dd-4737-af2c-750423bbdc56-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.461141 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/358a528d-56dd-4737-af2c-750423bbdc56-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.461166 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/358a528d-56dd-4737-af2c-750423bbdc56-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.461191 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.461218 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.461233 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.461251 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.461273 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/358a528d-56dd-4737-af2c-750423bbdc56-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.461316 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/358a528d-56dd-4737-af2c-750423bbdc56-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.462050 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.462488 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/358a528d-56dd-4737-af2c-750423bbdc56-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.464175 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/358a528d-56dd-4737-af2c-750423bbdc56-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.464338 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.464986 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/358a528d-56dd-4737-af2c-750423bbdc56-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.469722 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.481277 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/358a528d-56dd-4737-af2c-750423bbdc56-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.481358 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/358a528d-56dd-4737-af2c-750423bbdc56-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.486246 4755 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.486274 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e6532bd52b9aa617ec4882185abeecf9678960789d6649da4c0d87ef9f673b0b/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.486388 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.491483 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbvf8\" (UniqueName: \"kubernetes.io/projected/358a528d-56dd-4737-af2c-750423bbdc56-kube-api-access-xbvf8\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.498427 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.659474 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7\") pod \"rabbitmq-cell1-server-0\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:57 crc kubenswrapper[4755]: I0202 22:51:57.862501 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.000084 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.277009 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.278221 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.280885 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.281070 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-4wbvb" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.281196 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.281703 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.289823 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.298755 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.379061 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.379143 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0-config-data-generated\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.379190 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.379222 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0-kolla-config\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.379255 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2pk6l\" (UniqueName: \"kubernetes.io/projected/3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0-kube-api-access-2pk6l\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.379277 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c8decfc4-5dc8-49fe-8554-e7026e274044\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c8decfc4-5dc8-49fe-8554-e7026e274044\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.379306 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0-operator-scripts\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.379339 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0-config-data-default\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.480874 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.480956 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0-config-data-generated\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.480994 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.481035 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0-kolla-config\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.481083 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2pk6l\" (UniqueName: \"kubernetes.io/projected/3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0-kube-api-access-2pk6l\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.481109 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c8decfc4-5dc8-49fe-8554-e7026e274044\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c8decfc4-5dc8-49fe-8554-e7026e274044\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.481148 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0-operator-scripts\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.481180 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: 
\"kubernetes.io/configmap/3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0-config-data-default\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.482451 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0-config-data-generated\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.483426 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0-config-data-default\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.483894 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0-kolla-config\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.485090 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0-operator-scripts\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.486827 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.491328 4755 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.491374 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c8decfc4-5dc8-49fe-8554-e7026e274044\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c8decfc4-5dc8-49fe-8554-e7026e274044\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/116b73b32c242dff40cf9fab30ed04750873b947e8e72d8385af0b15860ee597/globalmount\"" pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.499140 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2pk6l\" (UniqueName: \"kubernetes.io/projected/3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0-kube-api-access-2pk6l\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.503762 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.540656 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c8decfc4-5dc8-49fe-8554-e7026e274044\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c8decfc4-5dc8-49fe-8554-e7026e274044\") pod \"openstack-galera-0\" (UID: \"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0\") " pod="openstack/openstack-galera-0" Feb 02 22:51:58 crc kubenswrapper[4755]: I0202 22:51:58.608591 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.712800 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.714349 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.716795 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.716877 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.717470 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-2rf2j" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.718188 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.727510 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.810398 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/91145094-ac53-469f-9ac1-e10732802d35-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.810459 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91145094-ac53-469f-9ac1-e10732802d35-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.810486 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/91145094-ac53-469f-9ac1-e10732802d35-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.810506 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/91145094-ac53-469f-9ac1-e10732802d35-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.810553 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b2973f7c-0f74-48d2-a64a-fcef03c2027e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b2973f7c-0f74-48d2-a64a-fcef03c2027e\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.810573 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lv9d\" (UniqueName: \"kubernetes.io/projected/91145094-ac53-469f-9ac1-e10732802d35-kube-api-access-7lv9d\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.810599 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91145094-ac53-469f-9ac1-e10732802d35-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.810625 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/91145094-ac53-469f-9ac1-e10732802d35-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.902070 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.905350 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.916907 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.917104 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-55zt5" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.917867 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91145094-ac53-469f-9ac1-e10732802d35-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.917910 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/91145094-ac53-469f-9ac1-e10732802d35-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.917946 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/91145094-ac53-469f-9ac1-e10732802d35-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.917990 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91145094-ac53-469f-9ac1-e10732802d35-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.918015 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/91145094-ac53-469f-9ac1-e10732802d35-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.918032 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/91145094-ac53-469f-9ac1-e10732802d35-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: 
\"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.918077 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b2973f7c-0f74-48d2-a64a-fcef03c2027e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b2973f7c-0f74-48d2-a64a-fcef03c2027e\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.918111 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lv9d\" (UniqueName: \"kubernetes.io/projected/91145094-ac53-469f-9ac1-e10732802d35-kube-api-access-7lv9d\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.918936 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.919886 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/91145094-ac53-469f-9ac1-e10732802d35-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.921252 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91145094-ac53-469f-9ac1-e10732802d35-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.921664 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/91145094-ac53-469f-9ac1-e10732802d35-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.921950 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/91145094-ac53-469f-9ac1-e10732802d35-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.933945 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/91145094-ac53-469f-9ac1-e10732802d35-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.935348 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.947694 4755 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.947776 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b2973f7c-0f74-48d2-a64a-fcef03c2027e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b2973f7c-0f74-48d2-a64a-fcef03c2027e\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/91af59b4e4410e143ccf9f2500fb52dfd2b9cadfdbf20ff345f6abaa1a4a23ce/globalmount\"" pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.954754 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91145094-ac53-469f-9ac1-e10732802d35-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:51:59 crc kubenswrapper[4755]: I0202 22:51:59.957315 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lv9d\" (UniqueName: \"kubernetes.io/projected/91145094-ac53-469f-9ac1-e10732802d35-kube-api-access-7lv9d\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:52:00 crc kubenswrapper[4755]: I0202 22:52:00.019513 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74tdp\" (UniqueName: \"kubernetes.io/projected/2ebaf1c7-94a5-47df-abbf-5ac70251c816-kube-api-access-74tdp\") pod \"memcached-0\" (UID: \"2ebaf1c7-94a5-47df-abbf-5ac70251c816\") " pod="openstack/memcached-0" Feb 02 22:52:00 crc kubenswrapper[4755]: I0202 22:52:00.019593 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2ebaf1c7-94a5-47df-abbf-5ac70251c816-config-data\") pod \"memcached-0\" (UID: \"2ebaf1c7-94a5-47df-abbf-5ac70251c816\") " pod="openstack/memcached-0" Feb 02 22:52:00 crc kubenswrapper[4755]: I0202 22:52:00.019625 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ebaf1c7-94a5-47df-abbf-5ac70251c816-combined-ca-bundle\") pod \"memcached-0\" (UID: \"2ebaf1c7-94a5-47df-abbf-5ac70251c816\") " pod="openstack/memcached-0" Feb 02 22:52:00 crc kubenswrapper[4755]: I0202 22:52:00.019650 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ebaf1c7-94a5-47df-abbf-5ac70251c816-memcached-tls-certs\") pod \"memcached-0\" (UID: \"2ebaf1c7-94a5-47df-abbf-5ac70251c816\") " pod="openstack/memcached-0" Feb 02 22:52:00 crc kubenswrapper[4755]: I0202 22:52:00.019672 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2ebaf1c7-94a5-47df-abbf-5ac70251c816-kolla-config\") pod \"memcached-0\" (UID: \"2ebaf1c7-94a5-47df-abbf-5ac70251c816\") " pod="openstack/memcached-0" Feb 02 22:52:00 crc kubenswrapper[4755]: I0202 22:52:00.121796 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2ebaf1c7-94a5-47df-abbf-5ac70251c816-config-data\") pod \"memcached-0\" (UID: 
\"2ebaf1c7-94a5-47df-abbf-5ac70251c816\") " pod="openstack/memcached-0" Feb 02 22:52:00 crc kubenswrapper[4755]: I0202 22:52:00.121857 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ebaf1c7-94a5-47df-abbf-5ac70251c816-combined-ca-bundle\") pod \"memcached-0\" (UID: \"2ebaf1c7-94a5-47df-abbf-5ac70251c816\") " pod="openstack/memcached-0" Feb 02 22:52:00 crc kubenswrapper[4755]: I0202 22:52:00.121880 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ebaf1c7-94a5-47df-abbf-5ac70251c816-memcached-tls-certs\") pod \"memcached-0\" (UID: \"2ebaf1c7-94a5-47df-abbf-5ac70251c816\") " pod="openstack/memcached-0" Feb 02 22:52:00 crc kubenswrapper[4755]: I0202 22:52:00.121899 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2ebaf1c7-94a5-47df-abbf-5ac70251c816-kolla-config\") pod \"memcached-0\" (UID: \"2ebaf1c7-94a5-47df-abbf-5ac70251c816\") " pod="openstack/memcached-0" Feb 02 22:52:00 crc kubenswrapper[4755]: I0202 22:52:00.121960 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74tdp\" (UniqueName: \"kubernetes.io/projected/2ebaf1c7-94a5-47df-abbf-5ac70251c816-kube-api-access-74tdp\") pod \"memcached-0\" (UID: \"2ebaf1c7-94a5-47df-abbf-5ac70251c816\") " pod="openstack/memcached-0" Feb 02 22:52:00 crc kubenswrapper[4755]: I0202 22:52:00.122487 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2ebaf1c7-94a5-47df-abbf-5ac70251c816-config-data\") pod \"memcached-0\" (UID: \"2ebaf1c7-94a5-47df-abbf-5ac70251c816\") " pod="openstack/memcached-0" Feb 02 22:52:00 crc kubenswrapper[4755]: I0202 22:52:00.123282 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2ebaf1c7-94a5-47df-abbf-5ac70251c816-kolla-config\") pod \"memcached-0\" (UID: \"2ebaf1c7-94a5-47df-abbf-5ac70251c816\") " pod="openstack/memcached-0" Feb 02 22:52:00 crc kubenswrapper[4755]: I0202 22:52:00.130272 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ebaf1c7-94a5-47df-abbf-5ac70251c816-memcached-tls-certs\") pod \"memcached-0\" (UID: \"2ebaf1c7-94a5-47df-abbf-5ac70251c816\") " pod="openstack/memcached-0" Feb 02 22:52:00 crc kubenswrapper[4755]: I0202 22:52:00.132433 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ebaf1c7-94a5-47df-abbf-5ac70251c816-combined-ca-bundle\") pod \"memcached-0\" (UID: \"2ebaf1c7-94a5-47df-abbf-5ac70251c816\") " pod="openstack/memcached-0" Feb 02 22:52:00 crc kubenswrapper[4755]: I0202 22:52:00.141885 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74tdp\" (UniqueName: \"kubernetes.io/projected/2ebaf1c7-94a5-47df-abbf-5ac70251c816-kube-api-access-74tdp\") pod \"memcached-0\" (UID: \"2ebaf1c7-94a5-47df-abbf-5ac70251c816\") " pod="openstack/memcached-0" Feb 02 22:52:00 crc kubenswrapper[4755]: I0202 22:52:00.148076 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b2973f7c-0f74-48d2-a64a-fcef03c2027e\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b2973f7c-0f74-48d2-a64a-fcef03c2027e\") pod \"openstack-cell1-galera-0\" (UID: \"91145094-ac53-469f-9ac1-e10732802d35\") " pod="openstack/openstack-cell1-galera-0" Feb 02 22:52:00 crc kubenswrapper[4755]: I0202 22:52:00.299250 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Feb 02 22:52:00 crc kubenswrapper[4755]: I0202 22:52:00.344130 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Feb 02 22:52:01 crc kubenswrapper[4755]: I0202 22:52:01.621640 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 22:52:01 crc kubenswrapper[4755]: I0202 22:52:01.622823 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 02 22:52:01 crc kubenswrapper[4755]: I0202 22:52:01.632538 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-h8z8w" Feb 02 22:52:01 crc kubenswrapper[4755]: I0202 22:52:01.644706 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 22:52:01 crc kubenswrapper[4755]: I0202 22:52:01.657809 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9bf7\" (UniqueName: \"kubernetes.io/projected/50c1f741-a034-4f5f-8cb2-1e7c5df9a090-kube-api-access-r9bf7\") pod \"kube-state-metrics-0\" (UID: \"50c1f741-a034-4f5f-8cb2-1e7c5df9a090\") " pod="openstack/kube-state-metrics-0" Feb 02 22:52:01 crc kubenswrapper[4755]: I0202 22:52:01.760599 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9bf7\" (UniqueName: \"kubernetes.io/projected/50c1f741-a034-4f5f-8cb2-1e7c5df9a090-kube-api-access-r9bf7\") pod \"kube-state-metrics-0\" (UID: \"50c1f741-a034-4f5f-8cb2-1e7c5df9a090\") " pod="openstack/kube-state-metrics-0" Feb 02 22:52:01 crc kubenswrapper[4755]: I0202 22:52:01.779312 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9bf7\" (UniqueName: \"kubernetes.io/projected/50c1f741-a034-4f5f-8cb2-1e7c5df9a090-kube-api-access-r9bf7\") pod \"kube-state-metrics-0\" (UID: \"50c1f741-a034-4f5f-8cb2-1e7c5df9a090\") " pod="openstack/kube-state-metrics-0" Feb 02 22:52:01 crc kubenswrapper[4755]: I0202 22:52:01.942970 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.247167 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"] Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.248768 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.253443 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.253636 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-chxhw" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.253902 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-cluster-tls-config" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.254071 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.255192 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.274855 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.369849 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/9664cb92-62aa-4d52-9936-96c48dc7c8d2-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"9664cb92-62aa-4d52-9936-96c48dc7c8d2\") " pod="openstack/alertmanager-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.369898 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7g4rw\" (UniqueName: \"kubernetes.io/projected/9664cb92-62aa-4d52-9936-96c48dc7c8d2-kube-api-access-7g4rw\") pod \"alertmanager-metric-storage-0\" (UID: \"9664cb92-62aa-4d52-9936-96c48dc7c8d2\") " pod="openstack/alertmanager-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.369934 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/9664cb92-62aa-4d52-9936-96c48dc7c8d2-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"9664cb92-62aa-4d52-9936-96c48dc7c8d2\") " pod="openstack/alertmanager-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.369966 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/9664cb92-62aa-4d52-9936-96c48dc7c8d2-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"9664cb92-62aa-4d52-9936-96c48dc7c8d2\") " pod="openstack/alertmanager-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.369988 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/9664cb92-62aa-4d52-9936-96c48dc7c8d2-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"9664cb92-62aa-4d52-9936-96c48dc7c8d2\") " pod="openstack/alertmanager-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.370026 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/9664cb92-62aa-4d52-9936-96c48dc7c8d2-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" 
(UID: \"9664cb92-62aa-4d52-9936-96c48dc7c8d2\") " pod="openstack/alertmanager-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.370051 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/9664cb92-62aa-4d52-9936-96c48dc7c8d2-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"9664cb92-62aa-4d52-9936-96c48dc7c8d2\") " pod="openstack/alertmanager-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.474156 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/9664cb92-62aa-4d52-9936-96c48dc7c8d2-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"9664cb92-62aa-4d52-9936-96c48dc7c8d2\") " pod="openstack/alertmanager-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.474463 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/9664cb92-62aa-4d52-9936-96c48dc7c8d2-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"9664cb92-62aa-4d52-9936-96c48dc7c8d2\") " pod="openstack/alertmanager-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.474515 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/9664cb92-62aa-4d52-9936-96c48dc7c8d2-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"9664cb92-62aa-4d52-9936-96c48dc7c8d2\") " pod="openstack/alertmanager-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.474536 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7g4rw\" (UniqueName: \"kubernetes.io/projected/9664cb92-62aa-4d52-9936-96c48dc7c8d2-kube-api-access-7g4rw\") pod \"alertmanager-metric-storage-0\" (UID: \"9664cb92-62aa-4d52-9936-96c48dc7c8d2\") " pod="openstack/alertmanager-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.474566 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/9664cb92-62aa-4d52-9936-96c48dc7c8d2-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"9664cb92-62aa-4d52-9936-96c48dc7c8d2\") " pod="openstack/alertmanager-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.474595 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/9664cb92-62aa-4d52-9936-96c48dc7c8d2-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"9664cb92-62aa-4d52-9936-96c48dc7c8d2\") " pod="openstack/alertmanager-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.474614 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/9664cb92-62aa-4d52-9936-96c48dc7c8d2-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"9664cb92-62aa-4d52-9936-96c48dc7c8d2\") " pod="openstack/alertmanager-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.478554 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/9664cb92-62aa-4d52-9936-96c48dc7c8d2-web-config\") pod \"alertmanager-metric-storage-0\" (UID: 
\"9664cb92-62aa-4d52-9936-96c48dc7c8d2\") " pod="openstack/alertmanager-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.482375 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/9664cb92-62aa-4d52-9936-96c48dc7c8d2-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"9664cb92-62aa-4d52-9936-96c48dc7c8d2\") " pod="openstack/alertmanager-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.488530 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/9664cb92-62aa-4d52-9936-96c48dc7c8d2-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"9664cb92-62aa-4d52-9936-96c48dc7c8d2\") " pod="openstack/alertmanager-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.489037 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/9664cb92-62aa-4d52-9936-96c48dc7c8d2-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"9664cb92-62aa-4d52-9936-96c48dc7c8d2\") " pod="openstack/alertmanager-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.490202 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/9664cb92-62aa-4d52-9936-96c48dc7c8d2-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"9664cb92-62aa-4d52-9936-96c48dc7c8d2\") " pod="openstack/alertmanager-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.508372 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/9664cb92-62aa-4d52-9936-96c48dc7c8d2-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"9664cb92-62aa-4d52-9936-96c48dc7c8d2\") " pod="openstack/alertmanager-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.512087 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7g4rw\" (UniqueName: \"kubernetes.io/projected/9664cb92-62aa-4d52-9936-96c48dc7c8d2-kube-api-access-7g4rw\") pod \"alertmanager-metric-storage-0\" (UID: \"9664cb92-62aa-4d52-9936-96c48dc7c8d2\") " pod="openstack/alertmanager-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.656486 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.818042 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.819760 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.823631 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-1" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.823793 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.823884 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.824030 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.824090 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-2" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.824688 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-cwccx" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.824806 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.824881 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.841666 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.881075 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tl8nt\" (UniqueName: \"kubernetes.io/projected/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-kube-api-access-tl8nt\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.881169 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.881199 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.881241 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.881281 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.881306 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.881445 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.881463 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.881479 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.881503 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-config\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.984810 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tl8nt\" (UniqueName: \"kubernetes.io/projected/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-kube-api-access-tl8nt\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.984865 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.984888 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " 
pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.984916 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.984952 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.984972 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.985018 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.985034 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.985052 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.985076 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-config\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.986295 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.987227 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" 
(UniqueName: \"kubernetes.io/configmap/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.987789 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.994090 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:02 crc kubenswrapper[4755]: I0202 22:52:02.994401 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-config\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:03 crc kubenswrapper[4755]: I0202 22:52:03.007357 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:03 crc kubenswrapper[4755]: I0202 22:52:03.007438 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:03 crc kubenswrapper[4755]: I0202 22:52:03.008070 4755 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 02 22:52:03 crc kubenswrapper[4755]: I0202 22:52:03.008100 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f9096daf592eed82232c3beba2134ba1e13b31cbf7a7e87be279794dff9df4fb/globalmount\"" pod="openstack/prometheus-metric-storage-0"
Feb 02 22:52:03 crc kubenswrapper[4755]: I0202 22:52:03.008861 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0"
Feb 02 22:52:03 crc kubenswrapper[4755]: I0202 22:52:03.014502 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tl8nt\" (UniqueName: \"kubernetes.io/projected/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-kube-api-access-tl8nt\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0"
Feb 02 22:52:03 crc kubenswrapper[4755]: I0202 22:52:03.039468 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68\") pod \"prometheus-metric-storage-0\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " pod="openstack/prometheus-metric-storage-0"
Feb 02 22:52:03 crc kubenswrapper[4755]: I0202 22:52:03.143495 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Feb 02 22:52:04 crc kubenswrapper[4755]: I0202 22:52:04.041597 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"58b4faf6-d651-4094-b0bd-857e9074d9a9","Type":"ContainerStarted","Data":"453aebe606241b7fd42c613ff692d7e396c337778248432aca99de35ecfea722"}
Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.329967 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-jdd7m"]
Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.337002 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jdd7m"
Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.345316 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-bhm24"
Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.345687 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts"
Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.347843 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs"
Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.350540 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-bzn9p"]
Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.352548 4755 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.362521 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jdd7m"] Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.378461 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-bzn9p"] Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.428717 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0-var-lib\") pod \"ovn-controller-ovs-bzn9p\" (UID: \"cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0\") " pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.428796 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vwhj\" (UniqueName: \"kubernetes.io/projected/cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0-kube-api-access-5vwhj\") pod \"ovn-controller-ovs-bzn9p\" (UID: \"cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0\") " pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.428838 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0-scripts\") pod \"ovn-controller-ovs-bzn9p\" (UID: \"cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0\") " pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.428860 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0-var-log\") pod \"ovn-controller-ovs-bzn9p\" (UID: \"cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0\") " pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.428914 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/06016f18-0f29-4d82-aa08-233d91c9a744-scripts\") pod \"ovn-controller-jdd7m\" (UID: \"06016f18-0f29-4d82-aa08-233d91c9a744\") " pod="openstack/ovn-controller-jdd7m" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.428949 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqgdc\" (UniqueName: \"kubernetes.io/projected/06016f18-0f29-4d82-aa08-233d91c9a744-kube-api-access-pqgdc\") pod \"ovn-controller-jdd7m\" (UID: \"06016f18-0f29-4d82-aa08-233d91c9a744\") " pod="openstack/ovn-controller-jdd7m" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.428978 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0-etc-ovs\") pod \"ovn-controller-ovs-bzn9p\" (UID: \"cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0\") " pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.429015 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/06016f18-0f29-4d82-aa08-233d91c9a744-var-log-ovn\") pod \"ovn-controller-jdd7m\" (UID: \"06016f18-0f29-4d82-aa08-233d91c9a744\") " pod="openstack/ovn-controller-jdd7m" Feb 02 22:52:05 crc kubenswrapper[4755]: 
I0202 22:52:05.429045 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06016f18-0f29-4d82-aa08-233d91c9a744-combined-ca-bundle\") pod \"ovn-controller-jdd7m\" (UID: \"06016f18-0f29-4d82-aa08-233d91c9a744\") " pod="openstack/ovn-controller-jdd7m" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.429068 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0-var-run\") pod \"ovn-controller-ovs-bzn9p\" (UID: \"cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0\") " pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.429103 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/06016f18-0f29-4d82-aa08-233d91c9a744-ovn-controller-tls-certs\") pod \"ovn-controller-jdd7m\" (UID: \"06016f18-0f29-4d82-aa08-233d91c9a744\") " pod="openstack/ovn-controller-jdd7m" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.429134 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/06016f18-0f29-4d82-aa08-233d91c9a744-var-run\") pod \"ovn-controller-jdd7m\" (UID: \"06016f18-0f29-4d82-aa08-233d91c9a744\") " pod="openstack/ovn-controller-jdd7m" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.429160 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/06016f18-0f29-4d82-aa08-233d91c9a744-var-run-ovn\") pod \"ovn-controller-jdd7m\" (UID: \"06016f18-0f29-4d82-aa08-233d91c9a744\") " pod="openstack/ovn-controller-jdd7m" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.530030 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/06016f18-0f29-4d82-aa08-233d91c9a744-scripts\") pod \"ovn-controller-jdd7m\" (UID: \"06016f18-0f29-4d82-aa08-233d91c9a744\") " pod="openstack/ovn-controller-jdd7m" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.530078 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqgdc\" (UniqueName: \"kubernetes.io/projected/06016f18-0f29-4d82-aa08-233d91c9a744-kube-api-access-pqgdc\") pod \"ovn-controller-jdd7m\" (UID: \"06016f18-0f29-4d82-aa08-233d91c9a744\") " pod="openstack/ovn-controller-jdd7m" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.530101 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0-etc-ovs\") pod \"ovn-controller-ovs-bzn9p\" (UID: \"cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0\") " pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.530131 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/06016f18-0f29-4d82-aa08-233d91c9a744-var-log-ovn\") pod \"ovn-controller-jdd7m\" (UID: \"06016f18-0f29-4d82-aa08-233d91c9a744\") " pod="openstack/ovn-controller-jdd7m" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.530152 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06016f18-0f29-4d82-aa08-233d91c9a744-combined-ca-bundle\") pod \"ovn-controller-jdd7m\" (UID: \"06016f18-0f29-4d82-aa08-233d91c9a744\") " pod="openstack/ovn-controller-jdd7m" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.530171 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0-var-run\") pod \"ovn-controller-ovs-bzn9p\" (UID: \"cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0\") " pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.530196 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/06016f18-0f29-4d82-aa08-233d91c9a744-ovn-controller-tls-certs\") pod \"ovn-controller-jdd7m\" (UID: \"06016f18-0f29-4d82-aa08-233d91c9a744\") " pod="openstack/ovn-controller-jdd7m" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.530219 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/06016f18-0f29-4d82-aa08-233d91c9a744-var-run\") pod \"ovn-controller-jdd7m\" (UID: \"06016f18-0f29-4d82-aa08-233d91c9a744\") " pod="openstack/ovn-controller-jdd7m" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.530237 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/06016f18-0f29-4d82-aa08-233d91c9a744-var-run-ovn\") pod \"ovn-controller-jdd7m\" (UID: \"06016f18-0f29-4d82-aa08-233d91c9a744\") " pod="openstack/ovn-controller-jdd7m" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.530269 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0-var-lib\") pod \"ovn-controller-ovs-bzn9p\" (UID: \"cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0\") " pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.530285 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vwhj\" (UniqueName: \"kubernetes.io/projected/cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0-kube-api-access-5vwhj\") pod \"ovn-controller-ovs-bzn9p\" (UID: \"cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0\") " pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.530309 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0-scripts\") pod \"ovn-controller-ovs-bzn9p\" (UID: \"cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0\") " pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.530325 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0-var-log\") pod \"ovn-controller-ovs-bzn9p\" (UID: \"cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0\") " pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.530557 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0-etc-ovs\") pod \"ovn-controller-ovs-bzn9p\" (UID: \"cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0\") " 
pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.530642 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/06016f18-0f29-4d82-aa08-233d91c9a744-var-log-ovn\") pod \"ovn-controller-jdd7m\" (UID: \"06016f18-0f29-4d82-aa08-233d91c9a744\") " pod="openstack/ovn-controller-jdd7m" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.530675 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0-var-log\") pod \"ovn-controller-ovs-bzn9p\" (UID: \"cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0\") " pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.530706 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/06016f18-0f29-4d82-aa08-233d91c9a744-var-run-ovn\") pod \"ovn-controller-jdd7m\" (UID: \"06016f18-0f29-4d82-aa08-233d91c9a744\") " pod="openstack/ovn-controller-jdd7m" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.530805 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/06016f18-0f29-4d82-aa08-233d91c9a744-var-run\") pod \"ovn-controller-jdd7m\" (UID: \"06016f18-0f29-4d82-aa08-233d91c9a744\") " pod="openstack/ovn-controller-jdd7m" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.530822 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0-var-lib\") pod \"ovn-controller-ovs-bzn9p\" (UID: \"cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0\") " pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.530852 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0-var-run\") pod \"ovn-controller-ovs-bzn9p\" (UID: \"cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0\") " pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.533084 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0-scripts\") pod \"ovn-controller-ovs-bzn9p\" (UID: \"cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0\") " pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.533830 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/06016f18-0f29-4d82-aa08-233d91c9a744-scripts\") pod \"ovn-controller-jdd7m\" (UID: \"06016f18-0f29-4d82-aa08-233d91c9a744\") " pod="openstack/ovn-controller-jdd7m" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.536612 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06016f18-0f29-4d82-aa08-233d91c9a744-combined-ca-bundle\") pod \"ovn-controller-jdd7m\" (UID: \"06016f18-0f29-4d82-aa08-233d91c9a744\") " pod="openstack/ovn-controller-jdd7m" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.544641 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/06016f18-0f29-4d82-aa08-233d91c9a744-ovn-controller-tls-certs\") pod 
\"ovn-controller-jdd7m\" (UID: \"06016f18-0f29-4d82-aa08-233d91c9a744\") " pod="openstack/ovn-controller-jdd7m" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.545794 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vwhj\" (UniqueName: \"kubernetes.io/projected/cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0-kube-api-access-5vwhj\") pod \"ovn-controller-ovs-bzn9p\" (UID: \"cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0\") " pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.549042 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqgdc\" (UniqueName: \"kubernetes.io/projected/06016f18-0f29-4d82-aa08-233d91c9a744-kube-api-access-pqgdc\") pod \"ovn-controller-jdd7m\" (UID: \"06016f18-0f29-4d82-aa08-233d91c9a744\") " pod="openstack/ovn-controller-jdd7m" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.665310 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jdd7m" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.678865 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.881454 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.897998 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.898127 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.914382 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.914864 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-kkfhg" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.916316 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.916527 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.917268 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.942658 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/47898ca9-c1e8-4b61-9e3c-701743aff784-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.942713 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c372d72f-9011-4f3c-8cc8-b5dd651b15b1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c372d72f-9011-4f3c-8cc8-b5dd651b15b1\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.942777 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/47898ca9-c1e8-4b61-9e3c-701743aff784-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.942804 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gk88m\" (UniqueName: \"kubernetes.io/projected/47898ca9-c1e8-4b61-9e3c-701743aff784-kube-api-access-gk88m\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.942857 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47898ca9-c1e8-4b61-9e3c-701743aff784-config\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.942878 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/47898ca9-c1e8-4b61-9e3c-701743aff784-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.942912 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/47898ca9-c1e8-4b61-9e3c-701743aff784-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0" Feb 02 22:52:05 crc kubenswrapper[4755]: I0202 22:52:05.942929 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47898ca9-c1e8-4b61-9e3c-701743aff784-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0" Feb 02 22:52:06 crc kubenswrapper[4755]: I0202 22:52:06.045070 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47898ca9-c1e8-4b61-9e3c-701743aff784-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0" Feb 02 22:52:06 crc kubenswrapper[4755]: I0202 22:52:06.045174 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/47898ca9-c1e8-4b61-9e3c-701743aff784-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0" Feb 02 22:52:06 crc kubenswrapper[4755]: I0202 22:52:06.045313 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c372d72f-9011-4f3c-8cc8-b5dd651b15b1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c372d72f-9011-4f3c-8cc8-b5dd651b15b1\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0" Feb 02 22:52:06 crc kubenswrapper[4755]: I0202 22:52:06.045425 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/47898ca9-c1e8-4b61-9e3c-701743aff784-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0" Feb 02 22:52:06 crc kubenswrapper[4755]: I0202 22:52:06.045547 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gk88m\" (UniqueName: \"kubernetes.io/projected/47898ca9-c1e8-4b61-9e3c-701743aff784-kube-api-access-gk88m\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0" Feb 02 22:52:06 crc kubenswrapper[4755]: I0202 22:52:06.045849 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47898ca9-c1e8-4b61-9e3c-701743aff784-config\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0" Feb 02 22:52:06 crc kubenswrapper[4755]: I0202 22:52:06.045894 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/47898ca9-c1e8-4b61-9e3c-701743aff784-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0" Feb 02 22:52:06 crc kubenswrapper[4755]: I0202 22:52:06.045945 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/47898ca9-c1e8-4b61-9e3c-701743aff784-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0" Feb 02 22:52:06 crc kubenswrapper[4755]: I0202 22:52:06.046958 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/47898ca9-c1e8-4b61-9e3c-701743aff784-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0" Feb 02 22:52:06 crc kubenswrapper[4755]: I0202 22:52:06.048938 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47898ca9-c1e8-4b61-9e3c-701743aff784-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0" Feb 02 22:52:06 crc kubenswrapper[4755]: I0202 22:52:06.049716 4755 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 02 22:52:06 crc kubenswrapper[4755]: I0202 22:52:06.049756 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c372d72f-9011-4f3c-8cc8-b5dd651b15b1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c372d72f-9011-4f3c-8cc8-b5dd651b15b1\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e67ed2ceb417a4c64e52380dc023ad6364a818ae4db8854a247bd950451bb9e3/globalmount\"" pod="openstack/ovsdbserver-nb-0"
Feb 02 22:52:06 crc kubenswrapper[4755]: I0202 22:52:06.050384 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/47898ca9-c1e8-4b61-9e3c-701743aff784-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0"
Feb 02 22:52:06 crc kubenswrapper[4755]: I0202 22:52:06.050489 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47898ca9-c1e8-4b61-9e3c-701743aff784-config\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0"
Feb 02 22:52:06 crc kubenswrapper[4755]: I0202 22:52:06.051150 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/47898ca9-c1e8-4b61-9e3c-701743aff784-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0"
Feb 02 22:52:06 crc kubenswrapper[4755]: I0202 22:52:06.061332 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/47898ca9-c1e8-4b61-9e3c-701743aff784-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0"
Feb 02 22:52:06 crc kubenswrapper[4755]: I0202 22:52:06.067047 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gk88m\" (UniqueName: \"kubernetes.io/projected/47898ca9-c1e8-4b61-9e3c-701743aff784-kube-api-access-gk88m\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0"
Feb 02 22:52:06 crc kubenswrapper[4755]: I0202 22:52:06.101449 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c372d72f-9011-4f3c-8cc8-b5dd651b15b1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c372d72f-9011-4f3c-8cc8-b5dd651b15b1\") pod \"ovsdbserver-nb-0\" (UID: \"47898ca9-c1e8-4b61-9e3c-701743aff784\") " pod="openstack/ovsdbserver-nb-0"
Feb 02 22:52:06 crc kubenswrapper[4755]: I0202 22:52:06.240781 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Feb 02 22:52:07 crc kubenswrapper[4755]: I0202 22:52:07.660169 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Feb 02 22:52:10 crc kubenswrapper[4755]: I0202 22:52:10.872548 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"]
Feb 02 22:52:10 crc kubenswrapper[4755]: I0202 22:52:10.874917 4755 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:10 crc kubenswrapper[4755]: I0202 22:52:10.877765 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Feb 02 22:52:10 crc kubenswrapper[4755]: I0202 22:52:10.878012 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-zvx5m" Feb 02 22:52:10 crc kubenswrapper[4755]: I0202 22:52:10.878257 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Feb 02 22:52:10 crc kubenswrapper[4755]: I0202 22:52:10.878403 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Feb 02 22:52:10 crc kubenswrapper[4755]: I0202 22:52:10.901250 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.033482 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhw78\" (UniqueName: \"kubernetes.io/projected/caf81ad2-83b7-4930-a67e-0c8dce4690ad-kube-api-access-fhw78\") pod \"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.033687 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/caf81ad2-83b7-4930-a67e-0c8dce4690ad-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.033876 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/caf81ad2-83b7-4930-a67e-0c8dce4690ad-config\") pod \"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.033953 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/caf81ad2-83b7-4930-a67e-0c8dce4690ad-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.034076 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/caf81ad2-83b7-4930-a67e-0c8dce4690ad-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.034173 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caf81ad2-83b7-4930-a67e-0c8dce4690ad-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.034206 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-45a78df4-4039-409d-98db-3fcc656d0ccd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45a78df4-4039-409d-98db-3fcc656d0ccd\") pod 
\"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.034273 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/caf81ad2-83b7-4930-a67e-0c8dce4690ad-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.135657 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/caf81ad2-83b7-4930-a67e-0c8dce4690ad-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.135765 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/caf81ad2-83b7-4930-a67e-0c8dce4690ad-config\") pod \"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.135796 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/caf81ad2-83b7-4930-a67e-0c8dce4690ad-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.135835 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/caf81ad2-83b7-4930-a67e-0c8dce4690ad-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.135886 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caf81ad2-83b7-4930-a67e-0c8dce4690ad-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.135913 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-45a78df4-4039-409d-98db-3fcc656d0ccd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45a78df4-4039-409d-98db-3fcc656d0ccd\") pod \"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.135951 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/caf81ad2-83b7-4930-a67e-0c8dce4690ad-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.135983 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhw78\" (UniqueName: \"kubernetes.io/projected/caf81ad2-83b7-4930-a67e-0c8dce4690ad-kube-api-access-fhw78\") pod \"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.136630 4755 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/caf81ad2-83b7-4930-a67e-0c8dce4690ad-config\") pod \"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.136862 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/caf81ad2-83b7-4930-a67e-0c8dce4690ad-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.136885 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/caf81ad2-83b7-4930-a67e-0c8dce4690ad-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.141036 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/caf81ad2-83b7-4930-a67e-0c8dce4690ad-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.142058 4755 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.142096 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-45a78df4-4039-409d-98db-3fcc656d0ccd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45a78df4-4039-409d-98db-3fcc656d0ccd\") pod \"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d507ee441e203445d6dad0f78a6fccb5908fdba3907ec78daa1e7cb811e689d4/globalmount\"" pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.145001 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/caf81ad2-83b7-4930-a67e-0c8dce4690ad-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.159771 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhw78\" (UniqueName: \"kubernetes.io/projected/caf81ad2-83b7-4930-a67e-0c8dce4690ad-kube-api-access-fhw78\") pod \"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.161794 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caf81ad2-83b7-4930-a67e-0c8dce4690ad-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.177295 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-45a78df4-4039-409d-98db-3fcc656d0ccd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45a78df4-4039-409d-98db-3fcc656d0ccd\") pod \"ovsdbserver-sb-0\" (UID: 
\"caf81ad2-83b7-4930-a67e-0c8dce4690ad\") " pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.220652 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.788182 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt"] Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.789648 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.798025 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-ca-bundle" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.798223 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-distributor-grpc" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.798353 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-distributor-http" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.798477 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-dockercfg-8l8hf" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.798610 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-config" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.813539 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt"] Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.953977 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/c6c5284b-8717-4e3a-bfc6-73fcadc8303d-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-99jqt\" (UID: \"c6c5284b-8717-4e3a-bfc6-73fcadc8303d\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.954033 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6c5284b-8717-4e3a-bfc6-73fcadc8303d-config\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-99jqt\" (UID: \"c6c5284b-8717-4e3a-bfc6-73fcadc8303d\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.954069 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c6c5284b-8717-4e3a-bfc6-73fcadc8303d-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-99jqt\" (UID: \"c6c5284b-8717-4e3a-bfc6-73fcadc8303d\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.954095 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/c6c5284b-8717-4e3a-bfc6-73fcadc8303d-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-99jqt\" (UID: \"c6c5284b-8717-4e3a-bfc6-73fcadc8303d\") " 
pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt" Feb 02 22:52:11 crc kubenswrapper[4755]: I0202 22:52:11.954130 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcbbw\" (UniqueName: \"kubernetes.io/projected/c6c5284b-8717-4e3a-bfc6-73fcadc8303d-kube-api-access-jcbbw\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-99jqt\" (UID: \"c6c5284b-8717-4e3a-bfc6-73fcadc8303d\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.052721 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f"] Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.054830 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.055143 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/c6c5284b-8717-4e3a-bfc6-73fcadc8303d-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-99jqt\" (UID: \"c6c5284b-8717-4e3a-bfc6-73fcadc8303d\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.055187 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6c5284b-8717-4e3a-bfc6-73fcadc8303d-config\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-99jqt\" (UID: \"c6c5284b-8717-4e3a-bfc6-73fcadc8303d\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.055223 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c6c5284b-8717-4e3a-bfc6-73fcadc8303d-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-99jqt\" (UID: \"c6c5284b-8717-4e3a-bfc6-73fcadc8303d\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.055250 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/c6c5284b-8717-4e3a-bfc6-73fcadc8303d-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-99jqt\" (UID: \"c6c5284b-8717-4e3a-bfc6-73fcadc8303d\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.055276 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcbbw\" (UniqueName: \"kubernetes.io/projected/c6c5284b-8717-4e3a-bfc6-73fcadc8303d-kube-api-access-jcbbw\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-99jqt\" (UID: \"c6c5284b-8717-4e3a-bfc6-73fcadc8303d\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.057620 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-querier-grpc" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.057993 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-loki-s3" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 
22:52:12.058194 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-querier-http" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.058271 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6c5284b-8717-4e3a-bfc6-73fcadc8303d-config\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-99jqt\" (UID: \"c6c5284b-8717-4e3a-bfc6-73fcadc8303d\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.059392 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c6c5284b-8717-4e3a-bfc6-73fcadc8303d-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-99jqt\" (UID: \"c6c5284b-8717-4e3a-bfc6-73fcadc8303d\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.061788 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/c6c5284b-8717-4e3a-bfc6-73fcadc8303d-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-99jqt\" (UID: \"c6c5284b-8717-4e3a-bfc6-73fcadc8303d\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.062375 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/c6c5284b-8717-4e3a-bfc6-73fcadc8303d-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-99jqt\" (UID: \"c6c5284b-8717-4e3a-bfc6-73fcadc8303d\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.075507 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f"] Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.081662 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcbbw\" (UniqueName: \"kubernetes.io/projected/c6c5284b-8717-4e3a-bfc6-73fcadc8303d-kube-api-access-jcbbw\") pod \"cloudkitty-lokistack-distributor-66dfd9bb-99jqt\" (UID: \"c6c5284b-8717-4e3a-bfc6-73fcadc8303d\") " pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.124096 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.157589 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/bd97d545-a2b9-47ea-a0cb-564ef5ac59d3-cloudkitty-lokistack-querier-grpc\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-gc84f\" (UID: \"bd97d545-a2b9-47ea-a0cb-564ef5ac59d3\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.157642 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd97d545-a2b9-47ea-a0cb-564ef5ac59d3-config\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-gc84f\" (UID: \"bd97d545-a2b9-47ea-a0cb-564ef5ac59d3\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.157665 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/bd97d545-a2b9-47ea-a0cb-564ef5ac59d3-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-gc84f\" (UID: \"bd97d545-a2b9-47ea-a0cb-564ef5ac59d3\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.157686 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bd97d545-a2b9-47ea-a0cb-564ef5ac59d3-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-gc84f\" (UID: \"bd97d545-a2b9-47ea-a0cb-564ef5ac59d3\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.157780 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/bd97d545-a2b9-47ea-a0cb-564ef5ac59d3-cloudkitty-lokistack-querier-http\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-gc84f\" (UID: \"bd97d545-a2b9-47ea-a0cb-564ef5ac59d3\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.157830 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdr52\" (UniqueName: \"kubernetes.io/projected/bd97d545-a2b9-47ea-a0cb-564ef5ac59d3-kube-api-access-jdr52\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-gc84f\" (UID: \"bd97d545-a2b9-47ea-a0cb-564ef5ac59d3\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.169641 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9"] Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.170994 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.173853 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-query-frontend-grpc" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.175571 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-query-frontend-http" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.193946 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9"] Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.259947 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/654caa39-8ecb-43ef-b132-aca5a922fd0f-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-mhst9\" (UID: \"654caa39-8ecb-43ef-b132-aca5a922fd0f\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.260061 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/bd97d545-a2b9-47ea-a0cb-564ef5ac59d3-cloudkitty-lokistack-querier-grpc\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-gc84f\" (UID: \"bd97d545-a2b9-47ea-a0cb-564ef5ac59d3\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.260106 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd97d545-a2b9-47ea-a0cb-564ef5ac59d3-config\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-gc84f\" (UID: \"bd97d545-a2b9-47ea-a0cb-564ef5ac59d3\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.260135 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/bd97d545-a2b9-47ea-a0cb-564ef5ac59d3-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-gc84f\" (UID: \"bd97d545-a2b9-47ea-a0cb-564ef5ac59d3\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.260175 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bd97d545-a2b9-47ea-a0cb-564ef5ac59d3-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-gc84f\" (UID: \"bd97d545-a2b9-47ea-a0cb-564ef5ac59d3\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.260206 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/654caa39-8ecb-43ef-b132-aca5a922fd0f-config\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-mhst9\" (UID: \"654caa39-8ecb-43ef-b132-aca5a922fd0f\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.260229 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-query-frontend-grpc\" 
(UniqueName: \"kubernetes.io/secret/654caa39-8ecb-43ef-b132-aca5a922fd0f-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-mhst9\" (UID: \"654caa39-8ecb-43ef-b132-aca5a922fd0f\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.260307 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/bd97d545-a2b9-47ea-a0cb-564ef5ac59d3-cloudkitty-lokistack-querier-http\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-gc84f\" (UID: \"bd97d545-a2b9-47ea-a0cb-564ef5ac59d3\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.260353 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/654caa39-8ecb-43ef-b132-aca5a922fd0f-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-mhst9\" (UID: \"654caa39-8ecb-43ef-b132-aca5a922fd0f\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.260383 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdr52\" (UniqueName: \"kubernetes.io/projected/bd97d545-a2b9-47ea-a0cb-564ef5ac59d3-kube-api-access-jdr52\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-gc84f\" (UID: \"bd97d545-a2b9-47ea-a0cb-564ef5ac59d3\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.260422 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cq85n\" (UniqueName: \"kubernetes.io/projected/654caa39-8ecb-43ef-b132-aca5a922fd0f-kube-api-access-cq85n\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-mhst9\" (UID: \"654caa39-8ecb-43ef-b132-aca5a922fd0f\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.262131 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd97d545-a2b9-47ea-a0cb-564ef5ac59d3-config\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-gc84f\" (UID: \"bd97d545-a2b9-47ea-a0cb-564ef5ac59d3\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.264808 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bd97d545-a2b9-47ea-a0cb-564ef5ac59d3-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-gc84f\" (UID: \"bd97d545-a2b9-47ea-a0cb-564ef5ac59d3\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.268824 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/bd97d545-a2b9-47ea-a0cb-564ef5ac59d3-cloudkitty-lokistack-querier-http\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-gc84f\" (UID: \"bd97d545-a2b9-47ea-a0cb-564ef5ac59d3\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.269124 4755 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/bd97d545-a2b9-47ea-a0cb-564ef5ac59d3-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-gc84f\" (UID: \"bd97d545-a2b9-47ea-a0cb-564ef5ac59d3\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.270825 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/bd97d545-a2b9-47ea-a0cb-564ef5ac59d3-cloudkitty-lokistack-querier-grpc\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-gc84f\" (UID: \"bd97d545-a2b9-47ea-a0cb-564ef5ac59d3\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.290496 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd"] Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.292021 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.295401 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd"] Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.300689 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdr52\" (UniqueName: \"kubernetes.io/projected/bd97d545-a2b9-47ea-a0cb-564ef5ac59d3-kube-api-access-jdr52\") pod \"cloudkitty-lokistack-querier-795fd8f8cc-gc84f\" (UID: \"bd97d545-a2b9-47ea-a0cb-564ef5ac59d3\") " pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.301865 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.307392 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-client-http" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.307716 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-dockercfg-p9dsx" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.308904 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-gateway" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.309026 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-gateway-ca-bundle" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.309111 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-http" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.309217 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-ca" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.331658 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh"] Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.332846 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.347814 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh"] Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.362371 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/654caa39-8ecb-43ef-b132-aca5a922fd0f-config\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-mhst9\" (UID: \"654caa39-8ecb-43ef-b132-aca5a922fd0f\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.362406 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/654caa39-8ecb-43ef-b132-aca5a922fd0f-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-mhst9\" (UID: \"654caa39-8ecb-43ef-b132-aca5a922fd0f\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.362484 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/654caa39-8ecb-43ef-b132-aca5a922fd0f-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-mhst9\" (UID: \"654caa39-8ecb-43ef-b132-aca5a922fd0f\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.362505 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cq85n\" (UniqueName: \"kubernetes.io/projected/654caa39-8ecb-43ef-b132-aca5a922fd0f-kube-api-access-cq85n\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-mhst9\" (UID: \"654caa39-8ecb-43ef-b132-aca5a922fd0f\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.362539 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/654caa39-8ecb-43ef-b132-aca5a922fd0f-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-mhst9\" (UID: \"654caa39-8ecb-43ef-b132-aca5a922fd0f\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.363622 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/654caa39-8ecb-43ef-b132-aca5a922fd0f-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-mhst9\" (UID: \"654caa39-8ecb-43ef-b132-aca5a922fd0f\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.363992 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/654caa39-8ecb-43ef-b132-aca5a922fd0f-config\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-mhst9\" (UID: \"654caa39-8ecb-43ef-b132-aca5a922fd0f\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.365817 4755 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/654caa39-8ecb-43ef-b132-aca5a922fd0f-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-mhst9\" (UID: \"654caa39-8ecb-43ef-b132-aca5a922fd0f\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.368153 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/654caa39-8ecb-43ef-b132-aca5a922fd0f-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-mhst9\" (UID: \"654caa39-8ecb-43ef-b132-aca5a922fd0f\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.386659 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cq85n\" (UniqueName: \"kubernetes.io/projected/654caa39-8ecb-43ef-b132-aca5a922fd0f-kube-api-access-cq85n\") pod \"cloudkitty-lokistack-query-frontend-5cd44666df-mhst9\" (UID: \"654caa39-8ecb-43ef-b132-aca5a922fd0f\") " pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.437699 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.465679 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.465747 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/957644f6-ace8-4ceb-88b6-7a2228097714-tenants\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.465763 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.465788 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/957644f6-ace8-4ceb-88b6-7a2228097714-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.465888 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/957644f6-ace8-4ceb-88b6-7a2228097714-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.465912 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/957644f6-ace8-4ceb-88b6-7a2228097714-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.465935 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4p9l\" (UniqueName: \"kubernetes.io/projected/957644f6-ace8-4ceb-88b6-7a2228097714-kube-api-access-s4p9l\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.465964 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/957644f6-ace8-4ceb-88b6-7a2228097714-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.465983 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-tenants\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.466028 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.466045 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/957644f6-ace8-4ceb-88b6-7a2228097714-rbac\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.466065 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-tls-secret\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.466079 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.466096 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/957644f6-ace8-4ceb-88b6-7a2228097714-tls-secret\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.466117 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/957644f6-ace8-4ceb-88b6-7a2228097714-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.466210 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.466252 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-rbac\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.466357 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ht7x9\" (UniqueName: \"kubernetes.io/projected/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-kube-api-access-ht7x9\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.495760 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.568008 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/957644f6-ace8-4ceb-88b6-7a2228097714-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.568094 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4p9l\" (UniqueName: \"kubernetes.io/projected/957644f6-ace8-4ceb-88b6-7a2228097714-kube-api-access-s4p9l\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.568141 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/957644f6-ace8-4ceb-88b6-7a2228097714-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.568162 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-tenants\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.568240 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.568259 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/957644f6-ace8-4ceb-88b6-7a2228097714-rbac\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.568301 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-tls-secret\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.568319 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " 
pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.568337 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/957644f6-ace8-4ceb-88b6-7a2228097714-tls-secret\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.568385 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/957644f6-ace8-4ceb-88b6-7a2228097714-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.568408 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.568424 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-rbac\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: E0202 22:52:12.568494 4755 secret.go:188] Couldn't get secret openstack/cloudkitty-lokistack-gateway-http: secret "cloudkitty-lokistack-gateway-http" not found Feb 02 22:52:12 crc kubenswrapper[4755]: E0202 22:52:12.568555 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/957644f6-ace8-4ceb-88b6-7a2228097714-tls-secret podName:957644f6-ace8-4ceb-88b6-7a2228097714 nodeName:}" failed. No retries permitted until 2026-02-02 22:52:13.068537108 +0000 UTC m=+1088.759757434 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-secret" (UniqueName: "kubernetes.io/secret/957644f6-ace8-4ceb-88b6-7a2228097714-tls-secret") pod "cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" (UID: "957644f6-ace8-4ceb-88b6-7a2228097714") : secret "cloudkitty-lokistack-gateway-http" not found Feb 02 22:52:12 crc kubenswrapper[4755]: E0202 22:52:12.568501 4755 secret.go:188] Couldn't get secret openstack/cloudkitty-lokistack-gateway-http: secret "cloudkitty-lokistack-gateway-http" not found Feb 02 22:52:12 crc kubenswrapper[4755]: E0202 22:52:12.568782 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-tls-secret podName:ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a nodeName:}" failed. No retries permitted until 2026-02-02 22:52:13.068773785 +0000 UTC m=+1088.759994111 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-secret" (UniqueName: "kubernetes.io/secret/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-tls-secret") pod "cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" (UID: "ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a") : secret "cloudkitty-lokistack-gateway-http" not found Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.569391 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/957644f6-ace8-4ceb-88b6-7a2228097714-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.569461 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/957644f6-ace8-4ceb-88b6-7a2228097714-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.569504 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/957644f6-ace8-4ceb-88b6-7a2228097714-rbac\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.569555 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.570292 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.570489 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/957644f6-ace8-4ceb-88b6-7a2228097714-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.570930 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-rbac\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.571459 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: 
\"kubernetes.io/secret/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.572047 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ht7x9\" (UniqueName: \"kubernetes.io/projected/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-kube-api-access-ht7x9\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.572233 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.572308 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/957644f6-ace8-4ceb-88b6-7a2228097714-tenants\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.572332 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.572365 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/957644f6-ace8-4ceb-88b6-7a2228097714-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.572405 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/957644f6-ace8-4ceb-88b6-7a2228097714-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.572881 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.573025 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/957644f6-ace8-4ceb-88b6-7a2228097714-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.573793 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.573816 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-tenants\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.575708 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/957644f6-ace8-4ceb-88b6-7a2228097714-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.575826 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/957644f6-ace8-4ceb-88b6-7a2228097714-tenants\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.583580 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4p9l\" (UniqueName: \"kubernetes.io/projected/957644f6-ace8-4ceb-88b6-7a2228097714-kube-api-access-s4p9l\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.586407 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ht7x9\" (UniqueName: \"kubernetes.io/projected/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-kube-api-access-ht7x9\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.992456 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"] Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.993768 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.996710 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-ingester-grpc" Feb 02 22:52:12 crc kubenswrapper[4755]: I0202 22:52:12.997126 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-ingester-http" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.011768 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"] Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.080901 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-tls-secret\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.080945 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/957644f6-ace8-4ceb-88b6-7a2228097714-tls-secret\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.084167 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/957644f6-ace8-4ceb-88b6-7a2228097714-tls-secret\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh\" (UID: \"957644f6-ace8-4ceb-88b6-7a2228097714\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.085403 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a-tls-secret\") pod \"cloudkitty-lokistack-gateway-7db4f4db8c-kphqd\" (UID: \"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a\") " pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.151524 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"] Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.152676 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.155448 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-compactor-grpc" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.155674 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-compactor-http" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.159872 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"] Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.183108 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/5717cdb5-a227-4975-b808-068f0ace63c5-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.183279 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5717cdb5-a227-4975-b808-068f0ace63c5-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.183383 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5717cdb5-a227-4975-b808-068f0ace63c5-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.183490 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: \"kubernetes.io/secret/5717cdb5-a227-4975-b808-068f0ace63c5-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.183809 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.183884 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.183920 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkbq9\" (UniqueName: \"kubernetes.io/projected/5717cdb5-a227-4975-b808-068f0ace63c5-kube-api-access-wkbq9\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc 
kubenswrapper[4755]: I0202 22:52:13.183969 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/5717cdb5-a227-4975-b808-068f0ace63c5-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.239017 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"] Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.240071 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.244999 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-index-gateway-http" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.245307 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-index-gateway-grpc" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.253439 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"] Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.257219 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.264849 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.285277 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nt8dl\" (UniqueName: \"kubernetes.io/projected/1092ca47-2068-4bc1-9e92-a085dc5eac3a-kube-api-access-nt8dl\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1092ca47-2068-4bc1-9e92-a085dc5eac3a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.285326 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5717cdb5-a227-4975-b808-068f0ace63c5-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.285350 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1092ca47-2068-4bc1-9e92-a085dc5eac3a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.285368 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/1092ca47-2068-4bc1-9e92-a085dc5eac3a-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1092ca47-2068-4bc1-9e92-a085dc5eac3a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.285398 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/5717cdb5-a227-4975-b808-068f0ace63c5-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.285436 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: \"kubernetes.io/secret/5717cdb5-a227-4975-b808-068f0ace63c5-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.285467 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1092ca47-2068-4bc1-9e92-a085dc5eac3a-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1092ca47-2068-4bc1-9e92-a085dc5eac3a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.285493 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: \"kubernetes.io/secret/1092ca47-2068-4bc1-9e92-a085dc5eac3a-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1092ca47-2068-4bc1-9e92-a085dc5eac3a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.285520 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/1092ca47-2068-4bc1-9e92-a085dc5eac3a-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1092ca47-2068-4bc1-9e92-a085dc5eac3a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.285552 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.285572 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.285588 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkbq9\" (UniqueName: \"kubernetes.io/projected/5717cdb5-a227-4975-b808-068f0ace63c5-kube-api-access-wkbq9\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.285607 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1092ca47-2068-4bc1-9e92-a085dc5eac3a-config\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1092ca47-2068-4bc1-9e92-a085dc5eac3a\") " 
pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.285628 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/5717cdb5-a227-4975-b808-068f0ace63c5-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.285664 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/5717cdb5-a227-4975-b808-068f0ace63c5-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.287254 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.287518 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.287892 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5717cdb5-a227-4975-b808-068f0ace63c5-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.289925 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5717cdb5-a227-4975-b808-068f0ace63c5-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.296380 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/5717cdb5-a227-4975-b808-068f0ace63c5-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.299107 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/5717cdb5-a227-4975-b808-068f0ace63c5-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.310104 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod 
\"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.310324 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: \"kubernetes.io/secret/5717cdb5-a227-4975-b808-068f0ace63c5-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.312289 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.332453 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkbq9\" (UniqueName: \"kubernetes.io/projected/5717cdb5-a227-4975-b808-068f0ace63c5-kube-api-access-wkbq9\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5717cdb5-a227-4975-b808-068f0ace63c5\") " pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.365894 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.387447 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/1092ca47-2068-4bc1-9e92-a085dc5eac3a-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1092ca47-2068-4bc1-9e92-a085dc5eac3a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.387498 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/f9a55300-7a89-493a-9001-1b77e3b64530-cloudkitty-lokistack-index-gateway-http\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"f9a55300-7a89-493a-9001-1b77e3b64530\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.387524 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/f9a55300-7a89-493a-9001-1b77e3b64530-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"f9a55300-7a89-493a-9001-1b77e3b64530\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.387550 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f9a55300-7a89-493a-9001-1b77e3b64530-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"f9a55300-7a89-493a-9001-1b77e3b64530\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.387573 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1092ca47-2068-4bc1-9e92-a085dc5eac3a-config\") pod 
\"cloudkitty-lokistack-compactor-0\" (UID: \"1092ca47-2068-4bc1-9e92-a085dc5eac3a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.387592 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9a55300-7a89-493a-9001-1b77e3b64530-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"f9a55300-7a89-493a-9001-1b77e3b64530\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.387618 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/f9a55300-7a89-493a-9001-1b77e3b64530-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"f9a55300-7a89-493a-9001-1b77e3b64530\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.387668 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nt8dl\" (UniqueName: \"kubernetes.io/projected/1092ca47-2068-4bc1-9e92-a085dc5eac3a-kube-api-access-nt8dl\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1092ca47-2068-4bc1-9e92-a085dc5eac3a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.387688 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1092ca47-2068-4bc1-9e92-a085dc5eac3a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.387703 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/1092ca47-2068-4bc1-9e92-a085dc5eac3a-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1092ca47-2068-4bc1-9e92-a085dc5eac3a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.387718 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"f9a55300-7a89-493a-9001-1b77e3b64530\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.387801 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1092ca47-2068-4bc1-9e92-a085dc5eac3a-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1092ca47-2068-4bc1-9e92-a085dc5eac3a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.387823 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: \"kubernetes.io/secret/1092ca47-2068-4bc1-9e92-a085dc5eac3a-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1092ca47-2068-4bc1-9e92-a085dc5eac3a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.387845 4755 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzrgq\" (UniqueName: \"kubernetes.io/projected/f9a55300-7a89-493a-9001-1b77e3b64530-kube-api-access-hzrgq\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"f9a55300-7a89-493a-9001-1b77e3b64530\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.388209 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1092ca47-2068-4bc1-9e92-a085dc5eac3a\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.388504 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1092ca47-2068-4bc1-9e92-a085dc5eac3a-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1092ca47-2068-4bc1-9e92-a085dc5eac3a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.389006 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1092ca47-2068-4bc1-9e92-a085dc5eac3a-config\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1092ca47-2068-4bc1-9e92-a085dc5eac3a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.392214 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: \"kubernetes.io/secret/1092ca47-2068-4bc1-9e92-a085dc5eac3a-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1092ca47-2068-4bc1-9e92-a085dc5eac3a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.397461 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/1092ca47-2068-4bc1-9e92-a085dc5eac3a-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1092ca47-2068-4bc1-9e92-a085dc5eac3a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.397895 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/1092ca47-2068-4bc1-9e92-a085dc5eac3a-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1092ca47-2068-4bc1-9e92-a085dc5eac3a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.405528 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nt8dl\" (UniqueName: \"kubernetes.io/projected/1092ca47-2068-4bc1-9e92-a085dc5eac3a-kube-api-access-nt8dl\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1092ca47-2068-4bc1-9e92-a085dc5eac3a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.412138 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"1092ca47-2068-4bc1-9e92-a085dc5eac3a\") " 
pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.475749 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.489898 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzrgq\" (UniqueName: \"kubernetes.io/projected/f9a55300-7a89-493a-9001-1b77e3b64530-kube-api-access-hzrgq\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"f9a55300-7a89-493a-9001-1b77e3b64530\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.489985 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/f9a55300-7a89-493a-9001-1b77e3b64530-cloudkitty-lokistack-index-gateway-http\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"f9a55300-7a89-493a-9001-1b77e3b64530\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.490021 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/f9a55300-7a89-493a-9001-1b77e3b64530-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"f9a55300-7a89-493a-9001-1b77e3b64530\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.490084 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f9a55300-7a89-493a-9001-1b77e3b64530-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"f9a55300-7a89-493a-9001-1b77e3b64530\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.490142 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9a55300-7a89-493a-9001-1b77e3b64530-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"f9a55300-7a89-493a-9001-1b77e3b64530\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.490182 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/f9a55300-7a89-493a-9001-1b77e3b64530-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"f9a55300-7a89-493a-9001-1b77e3b64530\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.490294 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"f9a55300-7a89-493a-9001-1b77e3b64530\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.490562 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"f9a55300-7a89-493a-9001-1b77e3b64530\") device mount path \"/mnt/openstack/pv09\"" 
pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.491910 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f9a55300-7a89-493a-9001-1b77e3b64530-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"f9a55300-7a89-493a-9001-1b77e3b64530\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.492211 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9a55300-7a89-493a-9001-1b77e3b64530-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"f9a55300-7a89-493a-9001-1b77e3b64530\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.494448 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/f9a55300-7a89-493a-9001-1b77e3b64530-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"f9a55300-7a89-493a-9001-1b77e3b64530\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.495402 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/f9a55300-7a89-493a-9001-1b77e3b64530-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"f9a55300-7a89-493a-9001-1b77e3b64530\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.495925 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/f9a55300-7a89-493a-9001-1b77e3b64530-cloudkitty-lokistack-index-gateway-http\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"f9a55300-7a89-493a-9001-1b77e3b64530\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.514904 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"f9a55300-7a89-493a-9001-1b77e3b64530\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.516613 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzrgq\" (UniqueName: \"kubernetes.io/projected/f9a55300-7a89-493a-9001-1b77e3b64530-kube-api-access-hzrgq\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"f9a55300-7a89-493a-9001-1b77e3b64530\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.579445 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 22:52:13 crc kubenswrapper[4755]: I0202 22:52:13.693148 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:16 crc kubenswrapper[4755]: I0202 22:52:16.152670 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"358a528d-56dd-4737-af2c-750423bbdc56","Type":"ContainerStarted","Data":"584d341809efd0db4757315d09a4d27356e8ab74aba11130a6d3901d16df1c91"} Feb 02 22:52:16 crc kubenswrapper[4755]: E0202 22:52:16.309560 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Feb 02 22:52:16 crc kubenswrapper[4755]: E0202 22:52:16.309965 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hf6kf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-ntn7z_openstack(ceb03ee7-fe5e-46b0-8360-9e73297c5c05): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 22:52:16 crc kubenswrapper[4755]: E0202 22:52:16.311245 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-ntn7z" podUID="ceb03ee7-fe5e-46b0-8360-9e73297c5c05" Feb 02 22:52:16 crc kubenswrapper[4755]: E0202 22:52:16.319606 4755 log.go:32] "PullImage from image service failed" err="rpc 
error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Feb 02 22:52:16 crc kubenswrapper[4755]: E0202 22:52:16.319818 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-g54gg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-jwx9f_openstack(5c336de3-751a-4836-a2a3-102de5e0fd11): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 22:52:16 crc kubenswrapper[4755]: E0202 22:52:16.321186 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-jwx9f" podUID="5c336de3-751a-4836-a2a3-102de5e0fd11" Feb 02 22:52:16 crc kubenswrapper[4755]: E0202 22:52:16.329777 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Feb 02 22:52:16 crc kubenswrapper[4755]: E0202 22:52:16.330251 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gl959,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-5bhdq_openstack(2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 22:52:16 crc kubenswrapper[4755]: E0202 22:52:16.331490 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-5bhdq" podUID="2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed" Feb 02 22:52:16 crc kubenswrapper[4755]: E0202 22:52:16.331948 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Feb 02 22:52:16 crc kubenswrapper[4755]: E0202 22:52:16.332085 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2rkds,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-wdgnn_openstack(eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 22:52:16 crc kubenswrapper[4755]: E0202 22:52:16.334898 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-wdgnn" podUID="eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559" Feb 02 22:52:16 crc kubenswrapper[4755]: I0202 22:52:16.824122 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 02 22:52:16 crc kubenswrapper[4755]: I0202 22:52:16.832565 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Feb 02 22:52:16 crc kubenswrapper[4755]: I0202 22:52:16.838090 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Feb 02 22:52:17 crc kubenswrapper[4755]: I0202 22:52:17.164046 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0","Type":"ContainerStarted","Data":"e12541feb2f2a6f5b1f994f2444f95e70406ab462464cc8c76ed32c0e3207a9e"} Feb 02 22:52:17 crc kubenswrapper[4755]: I0202 22:52:17.166368 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"91145094-ac53-469f-9ac1-e10732802d35","Type":"ContainerStarted","Data":"d3e43781d4f618bdac4c0ad13bdd4a10f492973d1c6256b2106fe0394951ee3b"} Feb 02 22:52:17 crc kubenswrapper[4755]: I0202 22:52:17.167841 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" 
event={"ID":"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc","Type":"ContainerStarted","Data":"eb055b349b8efb98665c41f6e33ad0828915b0237e824775cf8f80781fb851a8"} Feb 02 22:52:17 crc kubenswrapper[4755]: I0202 22:52:17.169044 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"50c1f741-a034-4f5f-8cb2-1e7c5df9a090","Type":"ContainerStarted","Data":"02048ae52cef47d5e17968500d6274e063ce6e19bc65ba7d2c63c76f4092cce1"} Feb 02 22:52:17 crc kubenswrapper[4755]: E0202 22:52:17.170535 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-ntn7z" podUID="ceb03ee7-fe5e-46b0-8360-9e73297c5c05" Feb 02 22:52:17 crc kubenswrapper[4755]: E0202 22:52:17.170782 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-wdgnn" podUID="eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559" Feb 02 22:52:17 crc kubenswrapper[4755]: I0202 22:52:17.728220 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"] Feb 02 22:52:17 crc kubenswrapper[4755]: I0202 22:52:17.755028 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh"] Feb 02 22:52:17 crc kubenswrapper[4755]: I0202 22:52:17.763417 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Feb 02 22:52:17 crc kubenswrapper[4755]: I0202 22:52:17.774195 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"] Feb 02 22:52:17 crc kubenswrapper[4755]: I0202 22:52:17.780629 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jdd7m"] Feb 02 22:52:17 crc kubenswrapper[4755]: I0202 22:52:17.792766 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Feb 02 22:52:17 crc kubenswrapper[4755]: I0202 22:52:17.797406 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt"] Feb 02 22:52:17 crc kubenswrapper[4755]: W0202 22:52:17.975675 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2ebaf1c7_94a5_47df_abbf_5ac70251c816.slice/crio-8c4bdfa5eccdb1c62aaf978fc71d06befa05ac73dd5d60510a5202c0ebb04eb0 WatchSource:0}: Error finding container 8c4bdfa5eccdb1c62aaf978fc71d06befa05ac73dd5d60510a5202c0ebb04eb0: Status 404 returned error can't find the container with id 8c4bdfa5eccdb1c62aaf978fc71d06befa05ac73dd5d60510a5202c0ebb04eb0 Feb 02 22:52:17 crc kubenswrapper[4755]: W0202 22:52:17.977158 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5717cdb5_a227_4975_b808_068f0ace63c5.slice/crio-53a0ef7e022421372396096ac7692afb58f818f423e1d0bd66750f042ebc2520 WatchSource:0}: Error finding container 53a0ef7e022421372396096ac7692afb58f818f423e1d0bd66750f042ebc2520: Status 404 returned error can't find the container with id 53a0ef7e022421372396096ac7692afb58f818f423e1d0bd66750f042ebc2520 Feb 02 22:52:17 crc kubenswrapper[4755]: W0202 22:52:17.992625 
4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1092ca47_2068_4bc1_9e92_a085dc5eac3a.slice/crio-a9cfb1934ca47902bfb8ca710e0a6c6be621c7137cbeb2ed84206e025fc8477a WatchSource:0}: Error finding container a9cfb1934ca47902bfb8ca710e0a6c6be621c7137cbeb2ed84206e025fc8477a: Status 404 returned error can't find the container with id a9cfb1934ca47902bfb8ca710e0a6c6be621c7137cbeb2ed84206e025fc8477a Feb 02 22:52:18 crc kubenswrapper[4755]: W0202 22:52:17.999392 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod957644f6_ace8_4ceb_88b6_7a2228097714.slice/crio-87528001bf5822b1eeedf8944dea7e48c68ff08d1eb6f961ed9283482aa16888 WatchSource:0}: Error finding container 87528001bf5822b1eeedf8944dea7e48c68ff08d1eb6f961ed9283482aa16888: Status 404 returned error can't find the container with id 87528001bf5822b1eeedf8944dea7e48c68ff08d1eb6f961ed9283482aa16888 Feb 02 22:52:18 crc kubenswrapper[4755]: W0202 22:52:18.003970 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod06016f18_0f29_4d82_aa08_233d91c9a744.slice/crio-9849d5ea504e9bc0754d56f313772c8a29a2623452062fbca884a231de36e15e WatchSource:0}: Error finding container 9849d5ea504e9bc0754d56f313772c8a29a2623452062fbca884a231de36e15e: Status 404 returned error can't find the container with id 9849d5ea504e9bc0754d56f313772c8a29a2623452062fbca884a231de36e15e Feb 02 22:52:18 crc kubenswrapper[4755]: W0202 22:52:18.025006 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc6c5284b_8717_4e3a_bfc6_73fcadc8303d.slice/crio-9ae6e53b6861fc0ab1863fc37690c7efc3d14e9083ff992ba84ac957157d8c81 WatchSource:0}: Error finding container 9ae6e53b6861fc0ab1863fc37690c7efc3d14e9083ff992ba84ac957157d8c81: Status 404 returned error can't find the container with id 9ae6e53b6861fc0ab1863fc37690c7efc3d14e9083ff992ba84ac957157d8c81 Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.077394 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-5bhdq" Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.087699 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-bzn9p"] Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.096790 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-jwx9f" Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.120402 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9"] Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.141783 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f"] Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.157306 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd"] Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.163675 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"] Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.179194 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-compactor-0" event={"ID":"1092ca47-2068-4bc1-9e92-a085dc5eac3a","Type":"ContainerStarted","Data":"a9cfb1934ca47902bfb8ca710e0a6c6be621c7137cbeb2ed84206e025fc8477a"} Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.183469 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt" event={"ID":"c6c5284b-8717-4e3a-bfc6-73fcadc8303d","Type":"ContainerStarted","Data":"9ae6e53b6861fc0ab1863fc37690c7efc3d14e9083ff992ba84ac957157d8c81"} Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.189435 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" event={"ID":"957644f6-ace8-4ceb-88b6-7a2228097714","Type":"ContainerStarted","Data":"87528001bf5822b1eeedf8944dea7e48c68ff08d1eb6f961ed9283482aa16888"} Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.190903 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-jwx9f" event={"ID":"5c336de3-751a-4836-a2a3-102de5e0fd11","Type":"ContainerDied","Data":"11d2d192ddcf9842d346e4c97b7289278d51ff97fef5c66d5b68a2e2d1d3314f"} Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.190909 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-jwx9f" Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.193553 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"9664cb92-62aa-4d52-9936-96c48dc7c8d2","Type":"ContainerStarted","Data":"36c669c203d316d908406c9a6f971fa486682cef1c9fd925775c66e862f18a89"} Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.194860 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-5bhdq" Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.194940 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-5bhdq" event={"ID":"2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed","Type":"ContainerDied","Data":"d17397c1c86696e208f1ea24ae22325b7157a201aab9e7956154c48727695046"} Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.197625 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jdd7m" event={"ID":"06016f18-0f29-4d82-aa08-233d91c9a744","Type":"ContainerStarted","Data":"9849d5ea504e9bc0754d56f313772c8a29a2623452062fbca884a231de36e15e"} Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.204593 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"2ebaf1c7-94a5-47df-abbf-5ac70251c816","Type":"ContainerStarted","Data":"8c4bdfa5eccdb1c62aaf978fc71d06befa05ac73dd5d60510a5202c0ebb04eb0"} Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.206596 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-ingester-0" event={"ID":"5717cdb5-a227-4975-b808-068f0ace63c5","Type":"ContainerStarted","Data":"53a0ef7e022421372396096ac7692afb58f818f423e1d0bd66750f042ebc2520"} Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.208102 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"58b4faf6-d651-4094-b0bd-857e9074d9a9","Type":"ContainerStarted","Data":"6fb82bf5a8c394829ac2dcdcea105aa583faa34495f7dfb958a9981f5423ecdc"} Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.211347 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"358a528d-56dd-4737-af2c-750423bbdc56","Type":"ContainerStarted","Data":"031ee13fb5a9e9bc160f26ee98bb344b428a4611b475b3d538d6bc14c4c2261f"} Feb 02 22:52:18 crc kubenswrapper[4755]: W0202 22:52:18.214617 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf1e2dcd_1dcd_44be_ae0f_e84fb7ce15f0.slice/crio-9e3ef8a10b491a66eec7b27c1ff4fc80307f0443e75a857d8c619c8e5f426e5b WatchSource:0}: Error finding container 9e3ef8a10b491a66eec7b27c1ff4fc80307f0443e75a857d8c619c8e5f426e5b: Status 404 returned error can't find the container with id 9e3ef8a10b491a66eec7b27c1ff4fc80307f0443e75a857d8c619c8e5f426e5b Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.215688 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.236420 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed-dns-svc\") pod \"2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed\" (UID: \"2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed\") " Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.236864 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c336de3-751a-4836-a2a3-102de5e0fd11-config\") pod \"5c336de3-751a-4836-a2a3-102de5e0fd11\" (UID: \"5c336de3-751a-4836-a2a3-102de5e0fd11\") " Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.236962 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed-config\") pod 
\"2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed\" (UID: \"2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed\") " Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.236999 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gl959\" (UniqueName: \"kubernetes.io/projected/2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed-kube-api-access-gl959\") pod \"2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed\" (UID: \"2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed\") " Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.237029 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g54gg\" (UniqueName: \"kubernetes.io/projected/5c336de3-751a-4836-a2a3-102de5e0fd11-kube-api-access-g54gg\") pod \"5c336de3-751a-4836-a2a3-102de5e0fd11\" (UID: \"5c336de3-751a-4836-a2a3-102de5e0fd11\") " Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.237430 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed" (UID: "2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.237775 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c336de3-751a-4836-a2a3-102de5e0fd11-config" (OuterVolumeSpecName: "config") pod "5c336de3-751a-4836-a2a3-102de5e0fd11" (UID: "5c336de3-751a-4836-a2a3-102de5e0fd11"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.238062 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed-config" (OuterVolumeSpecName: "config") pod "2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed" (UID: "2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.238640 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c336de3-751a-4836-a2a3-102de5e0fd11-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.238666 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.238678 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.261554 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed-kube-api-access-gl959" (OuterVolumeSpecName: "kube-api-access-gl959") pod "2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed" (UID: "2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed"). InnerVolumeSpecName "kube-api-access-gl959". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.275372 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c336de3-751a-4836-a2a3-102de5e0fd11-kube-api-access-g54gg" (OuterVolumeSpecName: "kube-api-access-g54gg") pod "5c336de3-751a-4836-a2a3-102de5e0fd11" (UID: "5c336de3-751a-4836-a2a3-102de5e0fd11"). InnerVolumeSpecName "kube-api-access-g54gg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:52:18 crc kubenswrapper[4755]: W0202 22:52:18.321510 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd97d545_a2b9_47ea_a0cb_564ef5ac59d3.slice/crio-3a1c3ad707a8167fd76fff81078df9dd52a42931d08ca15ba43a6e5fb031ac61 WatchSource:0}: Error finding container 3a1c3ad707a8167fd76fff81078df9dd52a42931d08ca15ba43a6e5fb031ac61: Status 404 returned error can't find the container with id 3a1c3ad707a8167fd76fff81078df9dd52a42931d08ca15ba43a6e5fb031ac61 Feb 02 22:52:18 crc kubenswrapper[4755]: W0202 22:52:18.324620 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf9a55300_7a89_493a_9001_1b77e3b64530.slice/crio-a03912d7fc1c458411c1e5d340546db3090a092b7ad96f001bd62a4c0a8ef12e WatchSource:0}: Error finding container a03912d7fc1c458411c1e5d340546db3090a092b7ad96f001bd62a4c0a8ef12e: Status 404 returned error can't find the container with id a03912d7fc1c458411c1e5d340546db3090a092b7ad96f001bd62a4c0a8ef12e Feb 02 22:52:18 crc kubenswrapper[4755]: W0202 22:52:18.335006 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod654caa39_8ecb_43ef_b132_aca5a922fd0f.slice/crio-c3090380ad4c7831afd47061a5f8e988d3796812d40e7ddec0132e242e766968 WatchSource:0}: Error finding container c3090380ad4c7831afd47061a5f8e988d3796812d40e7ddec0132e242e766968: Status 404 returned error can't find the container with id c3090380ad4c7831afd47061a5f8e988d3796812d40e7ddec0132e242e766968 Feb 02 22:52:18 crc kubenswrapper[4755]: W0202 22:52:18.336742 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podddc85ceb_9be7_45ae_8a7b_fd1aabfed85a.slice/crio-e37f0882d29cec40e4f35455415c5d9bc6da191ab6784452f21dd6878b14fa95 WatchSource:0}: Error finding container e37f0882d29cec40e4f35455415c5d9bc6da191ab6784452f21dd6878b14fa95: Status 404 returned error can't find the container with id e37f0882d29cec40e4f35455415c5d9bc6da191ab6784452f21dd6878b14fa95 Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.339844 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gl959\" (UniqueName: \"kubernetes.io/projected/2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed-kube-api-access-gl959\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.339863 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g54gg\" (UniqueName: \"kubernetes.io/projected/5c336de3-751a-4836-a2a3-102de5e0fd11-kube-api-access-g54gg\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:18 crc kubenswrapper[4755]: E0202 22:52:18.339989 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:gateway,Image:registry.redhat.io/openshift-logging/lokistack-gateway-rhel9@sha256:74d61619b9420655da84bc9939e37f76040b437a70e9c96eeb3267f00dfe88ad,Command:[],Args:[--debug.name=lokistack-gateway --web.listen=0.0.0.0:8080 --web.internal.listen=0.0.0.0:8081 --web.healthchecks.url=https://localhost:8080 --log.level=warn --logs.read.endpoint=https://cloudkitty-lokistack-query-frontend-http.openstack.svc.cluster.local:3100 --logs.tail.endpoint=https://cloudkitty-lokistack-query-frontend-http.openstack.svc.cluster.local:3100 --logs.write.endpoint=https://cloudkitty-lokistack-distributor-http.openstack.svc.cluster.local:3100 --logs.write-timeout=4m0s --rbac.config=/etc/lokistack-gateway/rbac.yaml --tenants.config=/etc/lokistack-gateway/tenants.yaml --server.read-timeout=48s --server.write-timeout=6m0s --tls.min-version=VersionTLS12 --tls.server.cert-file=/var/run/tls/http/server/tls.crt --tls.server.key-file=/var/run/tls/http/server/tls.key --tls.healthchecks.server-ca-file=/var/run/ca/server/service-ca.crt --tls.healthchecks.server-name=cloudkitty-lokistack-gateway-http.openstack.svc.cluster.local --tls.internal.server.cert-file=/var/run/tls/http/server/tls.crt --tls.internal.server.key-file=/var/run/tls/http/server/tls.key --tls.min-version=VersionTLS12 --tls.cipher-suites=TLS_AES_128_GCM_SHA256,TLS_AES_256_GCM_SHA384,TLS_CHACHA20_POLY1305_SHA256,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 --logs.tls.ca-file=/var/run/ca/upstream/service-ca.crt --logs.tls.cert-file=/var/run/tls/http/upstream/tls.crt --logs.tls.key-file=/var/run/tls/http/upstream/tls.key 
--tls.client-auth-type=RequestClientCert],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},ContainerPort{Name:public,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:rbac,ReadOnly:true,MountPath:/etc/lokistack-gateway/rbac.yaml,SubPath:rbac.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:tenants,ReadOnly:true,MountPath:/etc/lokistack-gateway/tenants.yaml,SubPath:tenants.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:lokistack-gateway,ReadOnly:true,MountPath:/etc/lokistack-gateway/lokistack-gateway.rego,SubPath:lokistack-gateway.rego,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:tls-secret,ReadOnly:true,MountPath:/var/run/tls/http/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-gateway-client-http,ReadOnly:true,MountPath:/var/run/tls/http/upstream,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-ca-bundle,ReadOnly:true,MountPath:/var/run/ca/upstream,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-gateway-ca-bundle,ReadOnly:true,MountPath:/var/run/ca/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-ca-bundle,ReadOnly:false,MountPath:/var/run/tenants-ca/cloudkitty,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ht7x9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/live,Port:{0 8081 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:2,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/ready,Port:{0 8081 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:12,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-lokistack-gateway-7db4f4db8c-kphqd_openstack(ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 02 22:52:18 crc kubenswrapper[4755]: E0202 22:52:18.341250 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" 
podUID="ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a" Feb 02 22:52:18 crc kubenswrapper[4755]: W0202 22:52:18.351953 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcaf81ad2_83b7_4930_a67e_0c8dce4690ad.slice/crio-65e2b2bfccee6e5a438f2109970c18373372f96c50b0da2da50fe72621439f51 WatchSource:0}: Error finding container 65e2b2bfccee6e5a438f2109970c18373372f96c50b0da2da50fe72621439f51: Status 404 returned error can't find the container with id 65e2b2bfccee6e5a438f2109970c18373372f96c50b0da2da50fe72621439f51 Feb 02 22:52:18 crc kubenswrapper[4755]: E0202 22:52:18.354930 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ovsdbserver-sb,Image:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,Command:[/usr/bin/dumb-init],Args:[/usr/local/bin/container-scripts/setup.sh],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nf6h656hb7h64ch86h5b5h5b7h555h5d8h687h5d6h5b8h565h65ch65fhb6hd7h64chcbh5b6h578hf9h56h546h8bh68fh86h5b4h67dh65ch8h566q,ValueFrom:nil,},EnvVar{Name:OVN_LOGDIR,Value:/tmp,ValueFrom:nil,},EnvVar{Name:OVN_RUNDIR,Value:/tmp,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovndbcluster-sb-etc-ovn,ReadOnly:false,MountPath:/etc/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdb-rundir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fhw78,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof 
ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:&Lifecycle{PostStart:nil,PreStop:&LifecycleHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/cleanup.sh],},HTTPGet:nil,TCPSocket:nil,Sleep:nil,},},TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:20,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovsdbserver-sb-0_openstack(caf81ad2-83b7-4930-a67e-0c8dce4690ad): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 02 22:52:18 crc kubenswrapper[4755]: E0202 22:52:18.360223 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:openstack-network-exporter,Image:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,Command:[/app/openstack-network-exporter],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:OPENSTACK_NETWORK_EXPORTER_YAML,Value:/etc/config/openstack-network-exporter.yaml,ValueFrom:nil,},EnvVar{Name:CONFIG_HASH,Value:nf6h656hb7h64ch86h5b5h5b7h555h5d8h687h5d6h5b8h565h65ch65fhb6hd7h64chcbh5b6h578hf9h56h546h8bh68fh86h5b4h67dh65ch8h566q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:ovsdb-rundir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:metrics-certs-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovnmetrics.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:metrics-certs-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovnmetrics.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:metrics-certs-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fhw78,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},Terminat
ionMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovsdbserver-sb-0_openstack(caf81ad2-83b7-4930-a67e-0c8dce4690ad): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 02 22:52:18 crc kubenswrapper[4755]: E0202 22:52:18.361408 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"ovsdbserver-sb\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"openstack-network-exporter\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack/ovsdbserver-sb-0" podUID="caf81ad2-83b7-4930-a67e-0c8dce4690ad" Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.620080 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-jwx9f"] Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.645102 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-jwx9f"] Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.702773 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-5bhdq"] Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.737766 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-5bhdq"] Feb 02 22:52:18 crc kubenswrapper[4755]: I0202 22:52:18.866953 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 02 22:52:19 crc kubenswrapper[4755]: I0202 22:52:19.081101 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed" path="/var/lib/kubelet/pods/2e5c4d02-0fb9-48fc-b9fa-afb8a6c031ed/volumes" Feb 02 22:52:19 crc kubenswrapper[4755]: I0202 22:52:19.081455 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c336de3-751a-4836-a2a3-102de5e0fd11" path="/var/lib/kubelet/pods/5c336de3-751a-4836-a2a3-102de5e0fd11/volumes" Feb 02 22:52:19 crc kubenswrapper[4755]: I0202 22:52:19.218968 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9" event={"ID":"654caa39-8ecb-43ef-b132-aca5a922fd0f","Type":"ContainerStarted","Data":"c3090380ad4c7831afd47061a5f8e988d3796812d40e7ddec0132e242e766968"} Feb 02 22:52:19 crc kubenswrapper[4755]: I0202 22:52:19.220525 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bzn9p" event={"ID":"cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0","Type":"ContainerStarted","Data":"9e3ef8a10b491a66eec7b27c1ff4fc80307f0443e75a857d8c619c8e5f426e5b"} Feb 02 22:52:19 crc kubenswrapper[4755]: I0202 22:52:19.221566 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" event={"ID":"bd97d545-a2b9-47ea-a0cb-564ef5ac59d3","Type":"ContainerStarted","Data":"3a1c3ad707a8167fd76fff81078df9dd52a42931d08ca15ba43a6e5fb031ac61"} Feb 02 22:52:19 crc kubenswrapper[4755]: I0202 22:52:19.223078 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" event={"ID":"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a","Type":"ContainerStarted","Data":"e37f0882d29cec40e4f35455415c5d9bc6da191ab6784452f21dd6878b14fa95"} Feb 02 22:52:19 crc kubenswrapper[4755]: E0202 22:52:19.224572 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with ImagePullBackOff: \"Back-off pulling image 
\\\"registry.redhat.io/openshift-logging/lokistack-gateway-rhel9@sha256:74d61619b9420655da84bc9939e37f76040b437a70e9c96eeb3267f00dfe88ad\\\"\"" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" podUID="ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a" Feb 02 22:52:19 crc kubenswrapper[4755]: I0202 22:52:19.226534 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"caf81ad2-83b7-4930-a67e-0c8dce4690ad","Type":"ContainerStarted","Data":"65e2b2bfccee6e5a438f2109970c18373372f96c50b0da2da50fe72621439f51"} Feb 02 22:52:19 crc kubenswrapper[4755]: E0202 22:52:19.230191 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"ovsdbserver-sb\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified\\\"\", failed to \"StartContainer\" for \"openstack-network-exporter\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified\\\"\"]" pod="openstack/ovsdbserver-sb-0" podUID="caf81ad2-83b7-4930-a67e-0c8dce4690ad" Feb 02 22:52:19 crc kubenswrapper[4755]: I0202 22:52:19.230588 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-index-gateway-0" event={"ID":"f9a55300-7a89-493a-9001-1b77e3b64530","Type":"ContainerStarted","Data":"a03912d7fc1c458411c1e5d340546db3090a092b7ad96f001bd62a4c0a8ef12e"} Feb 02 22:52:20 crc kubenswrapper[4755]: I0202 22:52:20.241314 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"47898ca9-c1e8-4b61-9e3c-701743aff784","Type":"ContainerStarted","Data":"897e5e27fd4e4340740f2bfafb199f3192d1de850ec561ec27eafe0a9c26f765"} Feb 02 22:52:20 crc kubenswrapper[4755]: E0202 22:52:20.243100 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/openshift-logging/lokistack-gateway-rhel9@sha256:74d61619b9420655da84bc9939e37f76040b437a70e9c96eeb3267f00dfe88ad\\\"\"" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" podUID="ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a" Feb 02 22:52:20 crc kubenswrapper[4755]: E0202 22:52:20.244291 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"ovsdbserver-sb\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified\\\"\", failed to \"StartContainer\" for \"openstack-network-exporter\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified\\\"\"]" pod="openstack/ovsdbserver-sb-0" podUID="caf81ad2-83b7-4930-a67e-0c8dce4690ad" Feb 02 22:52:30 crc kubenswrapper[4755]: E0202 22:52:30.960548 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:2b491fcb180423632d30811515a439a7a7f41023c1cfe4780647f18969b85a1d" Feb 02 22:52:30 crc kubenswrapper[4755]: E0202 22:52:30.961293 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:loki-index-gateway,Image:registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:2b491fcb180423632d30811515a439a7a7f41023c1cfe4780647f18969b85a1d,Command:[],Args:[-target=index-gateway 
-config.file=/etc/loki/config/config.yaml -runtime-config.file=/etc/loki/config/runtime-config.yaml -config.expand-env=true],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:3100,Protocol:TCP,HostIP:,},ContainerPort{Name:grpclb,HostPort:0,ContainerPort:9095,Protocol:TCP,HostIP:,},ContainerPort{Name:healthchecks,HostPort:0,ContainerPort:3101,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:AWS_ACCESS_KEY_ID,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:cloudkitty-loki-s3,},Key:access_key_id,Optional:nil,},},},EnvVar{Name:AWS_ACCESS_KEY_SECRET,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:cloudkitty-loki-s3,},Key:access_key_secret,Optional:nil,},},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:false,MountPath:/etc/loki/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:storage,ReadOnly:false,MountPath:/tmp/loki,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-index-gateway-http,ReadOnly:false,MountPath:/var/run/tls/http/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-loki-s3,ReadOnly:false,MountPath:/etc/storage/secrets,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-index-gateway-grpc,ReadOnly:false,MountPath:/var/run/tls/grpc/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-ca-bundle,ReadOnly:false,MountPath:/var/run/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hzrgq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/loki/api/v1/status/buildinfo,Port:{0 3101 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:2,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/ready,Port:{0 3101 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-lokistack-index-gateway-0_openstack(f9a55300-7a89-493a-9001-1b77e3b64530): ErrImagePull: rpc error: code = Canceled desc = copying system 
image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 22:52:30 crc kubenswrapper[4755]: E0202 22:52:30.962492 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-index-gateway\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/cloudkitty-lokistack-index-gateway-0" podUID="f9a55300-7a89-493a-9001-1b77e3b64530" Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.348682 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-compactor-0" event={"ID":"1092ca47-2068-4bc1-9e92-a085dc5eac3a","Type":"ContainerStarted","Data":"19840350d2d386bc496e4cadcf1a044d3ff54eba9c7b88c60e2128d96894552e"} Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.350075 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.353560 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-index-gateway-0" event={"ID":"f9a55300-7a89-493a-9001-1b77e3b64530","Type":"ContainerStarted","Data":"6b428e88b778ddf2963802c400b8a7f4b194aa664d9eea77bda5b91108e6a1d9"} Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.354109 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.360228 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt" event={"ID":"c6c5284b-8717-4e3a-bfc6-73fcadc8303d","Type":"ContainerStarted","Data":"eed6902770cc652052635f8f2ad01da09847d72b05c4d221f5b178622f96cf9f"} Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.360746 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt" Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.362608 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9" event={"ID":"654caa39-8ecb-43ef-b132-aca5a922fd0f","Type":"ContainerStarted","Data":"63ba86e3e53f1b50702df4c9f8519a4c78ce4e7c7e8090ec2d0a4302df673aaf"} Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.362635 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9" Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.363886 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bzn9p" event={"ID":"cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0","Type":"ContainerStarted","Data":"801aa1bc16d38ab2ca5967ec3c9551c3d22de4dead6e462cd2c2ff25a8cf22a3"} Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.365462 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" event={"ID":"bd97d545-a2b9-47ea-a0cb-564ef5ac59d3","Type":"ContainerStarted","Data":"add7ff768131195ad03983ae710257fcd3a7f2ed332e8f1ff91a5fadefc5fba3"} Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.365876 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.367237 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovn-controller-jdd7m" event={"ID":"06016f18-0f29-4d82-aa08-233d91c9a744","Type":"ContainerStarted","Data":"1fd39793302b7f332dfcaafbf882d31dad05e2369b407fe8bf539ac6338ded7c"} Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.367617 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-jdd7m" Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.369390 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-ingester-0" event={"ID":"5717cdb5-a227-4975-b808-068f0ace63c5","Type":"ContainerStarted","Data":"be0ada9789e507aed5b28c3b5bc12585f09e15d253ab16efa436496134a7424c"} Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.369857 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.371079 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0","Type":"ContainerStarted","Data":"e85581a0ed13918efd261a9489e49a83ff69f315b04f62a5d76c90e8b24ac930"} Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.372631 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"91145094-ac53-469f-9ac1-e10732802d35","Type":"ContainerStarted","Data":"bc8684b388f9dcacbf081bf88b03d4cbaf4fb0cfb806b371317cc1b0431fab14"} Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.374220 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" event={"ID":"957644f6-ace8-4ceb-88b6-7a2228097714","Type":"ContainerStarted","Data":"af3bd72b9f4d80fffe5561f4ffb9f9d951fb3ead9e29ee13f4fcc79d4d52425b"} Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.374967 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.377064 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"50c1f741-a034-4f5f-8cb2-1e7c5df9a090","Type":"ContainerStarted","Data":"ac007e2e081ce2813bc19a9e76b1d7e19f9bcfa2074c4e1c1a164ae06fb5a922"} Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.377219 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.377269 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-compactor-0" podStartSLOduration=6.936119536 podStartE2EDuration="20.377260829s" podCreationTimestamp="2026-02-02 22:52:12 +0000 UTC" firstStartedPulling="2026-02-02 22:52:17.996042301 +0000 UTC m=+1093.687262627" lastFinishedPulling="2026-02-02 22:52:31.437183594 +0000 UTC m=+1107.128403920" observedRunningTime="2026-02-02 22:52:32.376366594 +0000 UTC m=+1108.067586930" watchObservedRunningTime="2026-02-02 22:52:32.377260829 +0000 UTC m=+1108.068481155" Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.379419 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"47898ca9-c1e8-4b61-9e3c-701743aff784","Type":"ContainerStarted","Data":"2b25c36ed7bd953e8b7a7c11e1b79a7bbdea7d7a7ae131928c28ed036939ec52"} Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.383402 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" 
event={"ID":"2ebaf1c7-94a5-47df-abbf-5ac70251c816","Type":"ContainerStarted","Data":"f387a1aa098da4c89b5a3051baf04a75e7aa8367009ce89264210b043e9fa1aa"} Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.383768 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.415268 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.426151 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh" podStartSLOduration=7.048464448 podStartE2EDuration="20.426136797s" podCreationTimestamp="2026-02-02 22:52:12 +0000 UTC" firstStartedPulling="2026-02-02 22:52:18.003480629 +0000 UTC m=+1093.694700955" lastFinishedPulling="2026-02-02 22:52:31.381152978 +0000 UTC m=+1107.072373304" observedRunningTime="2026-02-02 22:52:32.402929062 +0000 UTC m=+1108.094149388" watchObservedRunningTime="2026-02-02 22:52:32.426136797 +0000 UTC m=+1108.117357123" Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.435948 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-jdd7m" podStartSLOduration=14.046892786 podStartE2EDuration="27.435932709s" podCreationTimestamp="2026-02-02 22:52:05 +0000 UTC" firstStartedPulling="2026-02-02 22:52:18.00850386 +0000 UTC m=+1093.699724186" lastFinishedPulling="2026-02-02 22:52:31.397543783 +0000 UTC m=+1107.088764109" observedRunningTime="2026-02-02 22:52:32.431851215 +0000 UTC m=+1108.123071541" watchObservedRunningTime="2026-02-02 22:52:32.435932709 +0000 UTC m=+1108.127153035" Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.456027 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" podStartSLOduration=7.343485553 podStartE2EDuration="20.456010687s" podCreationTimestamp="2026-02-02 22:52:12 +0000 UTC" firstStartedPulling="2026-02-02 22:52:18.324713572 +0000 UTC m=+1094.015933898" lastFinishedPulling="2026-02-02 22:52:31.437238706 +0000 UTC m=+1107.128459032" observedRunningTime="2026-02-02 22:52:32.448533089 +0000 UTC m=+1108.139753415" watchObservedRunningTime="2026-02-02 22:52:32.456010687 +0000 UTC m=+1108.147231013" Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.497969 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-ingester-0" podStartSLOduration=8.187288142 podStartE2EDuration="21.497950682s" podCreationTimestamp="2026-02-02 22:52:11 +0000 UTC" firstStartedPulling="2026-02-02 22:52:17.982129493 +0000 UTC m=+1093.673349819" lastFinishedPulling="2026-02-02 22:52:31.292792033 +0000 UTC m=+1106.984012359" observedRunningTime="2026-02-02 22:52:32.488827018 +0000 UTC m=+1108.180047354" watchObservedRunningTime="2026-02-02 22:52:32.497950682 +0000 UTC m=+1108.189171008" Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.538257 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt" podStartSLOduration=8.181043785 podStartE2EDuration="21.538241131s" podCreationTimestamp="2026-02-02 22:52:11 +0000 UTC" firstStartedPulling="2026-02-02 22:52:18.029513716 +0000 UTC m=+1093.720734042" lastFinishedPulling="2026-02-02 22:52:31.386711062 +0000 UTC m=+1107.077931388" 
observedRunningTime="2026-02-02 22:52:32.527080731 +0000 UTC m=+1108.218301057" watchObservedRunningTime="2026-02-02 22:52:32.538241131 +0000 UTC m=+1108.229461457" Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.549232 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9" podStartSLOduration=7.4483147370000005 podStartE2EDuration="20.549214056s" podCreationTimestamp="2026-02-02 22:52:12 +0000 UTC" firstStartedPulling="2026-02-02 22:52:18.336890582 +0000 UTC m=+1094.028110908" lastFinishedPulling="2026-02-02 22:52:31.437789901 +0000 UTC m=+1107.129010227" observedRunningTime="2026-02-02 22:52:32.54035023 +0000 UTC m=+1108.231570556" watchObservedRunningTime="2026-02-02 22:52:32.549214056 +0000 UTC m=+1108.240434382" Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.570206 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-index-gateway-0" podStartSLOduration=-9223372016.284586 podStartE2EDuration="20.570189298s" podCreationTimestamp="2026-02-02 22:52:12 +0000 UTC" firstStartedPulling="2026-02-02 22:52:18.328958491 +0000 UTC m=+1094.020178817" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:52:32.565103787 +0000 UTC m=+1108.256324113" watchObservedRunningTime="2026-02-02 22:52:32.570189298 +0000 UTC m=+1108.261409624" Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.608581 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=17.360243095 podStartE2EDuration="31.608562105s" podCreationTimestamp="2026-02-02 22:52:01 +0000 UTC" firstStartedPulling="2026-02-02 22:52:16.260786392 +0000 UTC m=+1091.952006728" lastFinishedPulling="2026-02-02 22:52:30.509105392 +0000 UTC m=+1106.200325738" observedRunningTime="2026-02-02 22:52:32.595024008 +0000 UTC m=+1108.286244334" watchObservedRunningTime="2026-02-02 22:52:32.608562105 +0000 UTC m=+1108.299782431" Feb 02 22:52:32 crc kubenswrapper[4755]: I0202 22:52:32.663678 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=20.305710041 podStartE2EDuration="33.663656425s" podCreationTimestamp="2026-02-02 22:51:59 +0000 UTC" firstStartedPulling="2026-02-02 22:52:17.98023208 +0000 UTC m=+1093.671452406" lastFinishedPulling="2026-02-02 22:52:31.338178464 +0000 UTC m=+1107.029398790" observedRunningTime="2026-02-02 22:52:32.655400286 +0000 UTC m=+1108.346620612" watchObservedRunningTime="2026-02-02 22:52:32.663656425 +0000 UTC m=+1108.354876741" Feb 02 22:52:33 crc kubenswrapper[4755]: I0202 22:52:33.409950 4755 generic.go:334] "Generic (PLEG): container finished" podID="cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0" containerID="801aa1bc16d38ab2ca5967ec3c9551c3d22de4dead6e462cd2c2ff25a8cf22a3" exitCode=0 Feb 02 22:52:33 crc kubenswrapper[4755]: I0202 22:52:33.410004 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bzn9p" event={"ID":"cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0","Type":"ContainerDied","Data":"801aa1bc16d38ab2ca5967ec3c9551c3d22de4dead6e462cd2c2ff25a8cf22a3"} Feb 02 22:52:34 crc kubenswrapper[4755]: I0202 22:52:34.426209 4755 generic.go:334] "Generic (PLEG): container finished" podID="eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559" containerID="ec02cb2a91681d5bd180b947b13814d32870fa7a3313bc0816d8853116b89b95" exitCode=0 Feb 02 22:52:34 crc kubenswrapper[4755]: I0202 22:52:34.426383 4755 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-wdgnn" event={"ID":"eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559","Type":"ContainerDied","Data":"ec02cb2a91681d5bd180b947b13814d32870fa7a3313bc0816d8853116b89b95"} Feb 02 22:52:34 crc kubenswrapper[4755]: I0202 22:52:34.476017 4755 generic.go:334] "Generic (PLEG): container finished" podID="ceb03ee7-fe5e-46b0-8360-9e73297c5c05" containerID="ef43c1445858a7d2ac02aa8e77740632ef9cbc1f4bed36eb617b85f087533d39" exitCode=0 Feb 02 22:52:34 crc kubenswrapper[4755]: I0202 22:52:34.476121 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-ntn7z" event={"ID":"ceb03ee7-fe5e-46b0-8360-9e73297c5c05","Type":"ContainerDied","Data":"ef43c1445858a7d2ac02aa8e77740632ef9cbc1f4bed36eb617b85f087533d39"} Feb 02 22:52:34 crc kubenswrapper[4755]: I0202 22:52:34.491148 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"9664cb92-62aa-4d52-9936-96c48dc7c8d2","Type":"ContainerStarted","Data":"a39c8e006e39019dedd36cc28cb8cb8d15086fdf7afe4ae2e3e1c48206397956"} Feb 02 22:52:34 crc kubenswrapper[4755]: I0202 22:52:34.545202 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc","Type":"ContainerStarted","Data":"e115599dcb652a384d5f72adc533a042b0ef4ae64e3fc1015e64d204f93e6195"} Feb 02 22:52:35 crc kubenswrapper[4755]: I0202 22:52:35.555875 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"47898ca9-c1e8-4b61-9e3c-701743aff784","Type":"ContainerStarted","Data":"dc6a442f95fbc27f7fa0269537a0c4f6569839a001ac1d54e4639bc4a5ad4e30"} Feb 02 22:52:35 crc kubenswrapper[4755]: I0202 22:52:35.557607 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-wdgnn" event={"ID":"eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559","Type":"ContainerStarted","Data":"7a7b74a8dd1b104795c29a8a8287ba715d04262a6dacaaa0a78b482e4a30ab16"} Feb 02 22:52:35 crc kubenswrapper[4755]: I0202 22:52:35.558217 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-wdgnn" Feb 02 22:52:35 crc kubenswrapper[4755]: I0202 22:52:35.562497 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-ntn7z" event={"ID":"ceb03ee7-fe5e-46b0-8360-9e73297c5c05","Type":"ContainerStarted","Data":"014ed3559017e7a423f9f1640b466026f48cfe56a2b4393adf7b635c41ebafca"} Feb 02 22:52:35 crc kubenswrapper[4755]: I0202 22:52:35.563186 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-ntn7z" Feb 02 22:52:35 crc kubenswrapper[4755]: I0202 22:52:35.566211 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bzn9p" event={"ID":"cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0","Type":"ContainerStarted","Data":"41d76aa93c84ce86fdfd0223dc729437335675e95f6ceceeed0db0cd66bb030d"} Feb 02 22:52:35 crc kubenswrapper[4755]: I0202 22:52:35.566265 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bzn9p" event={"ID":"cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0","Type":"ContainerStarted","Data":"e3cf6b301d0dabcc4c09d337c1f21f12d7627d549b59445ccff50accb84a4aec"} Feb 02 22:52:35 crc kubenswrapper[4755]: I0202 22:52:35.566959 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:52:35 crc 
kubenswrapper[4755]: I0202 22:52:35.567243 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:52:35 crc kubenswrapper[4755]: I0202 22:52:35.568178 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" event={"ID":"ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a","Type":"ContainerStarted","Data":"a46dc7349548596b73cea2767d061cdb56bf39e8a3f61d44ba29d7c90886350c"} Feb 02 22:52:35 crc kubenswrapper[4755]: I0202 22:52:35.569100 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:35 crc kubenswrapper[4755]: I0202 22:52:35.572905 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"caf81ad2-83b7-4930-a67e-0c8dce4690ad","Type":"ContainerStarted","Data":"fa48a4e9fcc9bdb6f988efb95db99e081da711c700a53ca3a96ad6e66ce4c414"} Feb 02 22:52:35 crc kubenswrapper[4755]: I0202 22:52:35.573111 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"caf81ad2-83b7-4930-a67e-0c8dce4690ad","Type":"ContainerStarted","Data":"f041faaca5f68db6ded11065886836048e09a0dd0d16ac4943be76c191c041c6"} Feb 02 22:52:35 crc kubenswrapper[4755]: I0202 22:52:35.586042 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" Feb 02 22:52:35 crc kubenswrapper[4755]: I0202 22:52:35.589166 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=16.394707304 podStartE2EDuration="31.589144675s" podCreationTimestamp="2026-02-02 22:52:04 +0000 UTC" firstStartedPulling="2026-02-02 22:52:19.42405206 +0000 UTC m=+1095.115272376" lastFinishedPulling="2026-02-02 22:52:34.618489421 +0000 UTC m=+1110.309709747" observedRunningTime="2026-02-02 22:52:35.5788489 +0000 UTC m=+1111.270069256" watchObservedRunningTime="2026-02-02 22:52:35.589144675 +0000 UTC m=+1111.280365011" Feb 02 22:52:35 crc kubenswrapper[4755]: I0202 22:52:35.611126 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-wdgnn" podStartSLOduration=4.880268085 podStartE2EDuration="40.611098625s" podCreationTimestamp="2026-02-02 22:51:55 +0000 UTC" firstStartedPulling="2026-02-02 22:51:56.818985953 +0000 UTC m=+1072.510206279" lastFinishedPulling="2026-02-02 22:52:32.549816493 +0000 UTC m=+1108.241036819" observedRunningTime="2026-02-02 22:52:35.603959427 +0000 UTC m=+1111.295179773" watchObservedRunningTime="2026-02-02 22:52:35.611098625 +0000 UTC m=+1111.302318961" Feb 02 22:52:35 crc kubenswrapper[4755]: I0202 22:52:35.636012 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=9.843986191 podStartE2EDuration="26.635979017s" podCreationTimestamp="2026-02-02 22:52:09 +0000 UTC" firstStartedPulling="2026-02-02 22:52:18.354763411 +0000 UTC m=+1094.045983737" lastFinishedPulling="2026-02-02 22:52:35.146756227 +0000 UTC m=+1110.837976563" observedRunningTime="2026-02-02 22:52:35.6252913 +0000 UTC m=+1111.316511626" watchObservedRunningTime="2026-02-02 22:52:35.635979017 +0000 UTC m=+1111.327199403" Feb 02 22:52:35 crc kubenswrapper[4755]: I0202 22:52:35.651973 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-gateway-7db4f4db8c-kphqd" 
podStartSLOduration=-9223372013.202824 podStartE2EDuration="23.65195132s" podCreationTimestamp="2026-02-02 22:52:12 +0000 UTC" firstStartedPulling="2026-02-02 22:52:18.339802514 +0000 UTC m=+1094.031022840" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:52:35.647347462 +0000 UTC m=+1111.338567788" watchObservedRunningTime="2026-02-02 22:52:35.65195132 +0000 UTC m=+1111.343171666" Feb 02 22:52:35 crc kubenswrapper[4755]: I0202 22:52:35.677530 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-bzn9p" podStartSLOduration=17.556195616 podStartE2EDuration="30.67751318s" podCreationTimestamp="2026-02-02 22:52:05 +0000 UTC" firstStartedPulling="2026-02-02 22:52:18.216826269 +0000 UTC m=+1093.908046595" lastFinishedPulling="2026-02-02 22:52:31.338143823 +0000 UTC m=+1107.029364159" observedRunningTime="2026-02-02 22:52:35.671426011 +0000 UTC m=+1111.362646367" watchObservedRunningTime="2026-02-02 22:52:35.67751318 +0000 UTC m=+1111.368733506" Feb 02 22:52:35 crc kubenswrapper[4755]: I0202 22:52:35.688253 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-ntn7z" podStartSLOduration=4.711007545 podStartE2EDuration="40.688236738s" podCreationTimestamp="2026-02-02 22:51:55 +0000 UTC" firstStartedPulling="2026-02-02 22:51:56.570712427 +0000 UTC m=+1072.261932753" lastFinishedPulling="2026-02-02 22:52:32.54794162 +0000 UTC m=+1108.239161946" observedRunningTime="2026-02-02 22:52:35.685096251 +0000 UTC m=+1111.376316597" watchObservedRunningTime="2026-02-02 22:52:35.688236738 +0000 UTC m=+1111.379457084" Feb 02 22:52:36 crc kubenswrapper[4755]: I0202 22:52:36.221370 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:36 crc kubenswrapper[4755]: I0202 22:52:36.241800 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Feb 02 22:52:36 crc kubenswrapper[4755]: I0202 22:52:36.241976 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Feb 02 22:52:36 crc kubenswrapper[4755]: I0202 22:52:36.296110 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Feb 02 22:52:36 crc kubenswrapper[4755]: I0202 22:52:36.583156 4755 generic.go:334] "Generic (PLEG): container finished" podID="91145094-ac53-469f-9ac1-e10732802d35" containerID="bc8684b388f9dcacbf081bf88b03d4cbaf4fb0cfb806b371317cc1b0431fab14" exitCode=0 Feb 02 22:52:36 crc kubenswrapper[4755]: I0202 22:52:36.583355 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"91145094-ac53-469f-9ac1-e10732802d35","Type":"ContainerDied","Data":"bc8684b388f9dcacbf081bf88b03d4cbaf4fb0cfb806b371317cc1b0431fab14"} Feb 02 22:52:37 crc kubenswrapper[4755]: I0202 22:52:37.595480 4755 generic.go:334] "Generic (PLEG): container finished" podID="3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0" containerID="e85581a0ed13918efd261a9489e49a83ff69f315b04f62a5d76c90e8b24ac930" exitCode=0 Feb 02 22:52:37 crc kubenswrapper[4755]: I0202 22:52:37.595603 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0","Type":"ContainerDied","Data":"e85581a0ed13918efd261a9489e49a83ff69f315b04f62a5d76c90e8b24ac930"} Feb 02 22:52:37 crc kubenswrapper[4755]: I0202 22:52:37.598287 4755 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"91145094-ac53-469f-9ac1-e10732802d35","Type":"ContainerStarted","Data":"acbadbf01a6d5a97f53b9fcb4b3988bc7f7bfa0164f73ef4b0f46a53f28bd082"} Feb 02 22:52:37 crc kubenswrapper[4755]: I0202 22:52:37.682904 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=25.216084131 podStartE2EDuration="39.682878731s" podCreationTimestamp="2026-02-02 22:51:58 +0000 UTC" firstStartedPulling="2026-02-02 22:52:16.913296278 +0000 UTC m=+1092.604516604" lastFinishedPulling="2026-02-02 22:52:31.380090878 +0000 UTC m=+1107.071311204" observedRunningTime="2026-02-02 22:52:37.671305439 +0000 UTC m=+1113.362525785" watchObservedRunningTime="2026-02-02 22:52:37.682878731 +0000 UTC m=+1113.374099107" Feb 02 22:52:37 crc kubenswrapper[4755]: I0202 22:52:37.690925 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Feb 02 22:52:37 crc kubenswrapper[4755]: I0202 22:52:37.975315 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-wdgnn"] Feb 02 22:52:37 crc kubenswrapper[4755]: I0202 22:52:37.975648 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-wdgnn" podUID="eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559" containerName="dnsmasq-dns" containerID="cri-o://7a7b74a8dd1b104795c29a8a8287ba715d04262a6dacaaa0a78b482e4a30ab16" gracePeriod=10 Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.016159 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-k28p4"] Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.017519 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.019324 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.030377 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-k28p4"] Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.112323 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c535d87-4b67-4e2a-b070-d468468d1f91-config\") pod \"dnsmasq-dns-5bf47b49b7-k28p4\" (UID: \"6c535d87-4b67-4e2a-b070-d468468d1f91\") " pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.112866 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c535d87-4b67-4e2a-b070-d468468d1f91-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-k28p4\" (UID: \"6c535d87-4b67-4e2a-b070-d468468d1f91\") " pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.112914 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgj8p\" (UniqueName: \"kubernetes.io/projected/6c535d87-4b67-4e2a-b070-d468468d1f91-kube-api-access-tgj8p\") pod \"dnsmasq-dns-5bf47b49b7-k28p4\" (UID: \"6c535d87-4b67-4e2a-b070-d468468d1f91\") " pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.112939 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6c535d87-4b67-4e2a-b070-d468468d1f91-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-k28p4\" (UID: \"6c535d87-4b67-4e2a-b070-d468468d1f91\") " pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.214762 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-tpnqc"] Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.217580 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c535d87-4b67-4e2a-b070-d468468d1f91-config\") pod \"dnsmasq-dns-5bf47b49b7-k28p4\" (UID: \"6c535d87-4b67-4e2a-b070-d468468d1f91\") " pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.217687 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c535d87-4b67-4e2a-b070-d468468d1f91-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-k28p4\" (UID: \"6c535d87-4b67-4e2a-b070-d468468d1f91\") " pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.217738 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgj8p\" (UniqueName: \"kubernetes.io/projected/6c535d87-4b67-4e2a-b070-d468468d1f91-kube-api-access-tgj8p\") pod \"dnsmasq-dns-5bf47b49b7-k28p4\" (UID: \"6c535d87-4b67-4e2a-b070-d468468d1f91\") " pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.217760 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/6c535d87-4b67-4e2a-b070-d468468d1f91-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-k28p4\" (UID: \"6c535d87-4b67-4e2a-b070-d468468d1f91\") " pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.218532 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6c535d87-4b67-4e2a-b070-d468468d1f91-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-k28p4\" (UID: \"6c535d87-4b67-4e2a-b070-d468468d1f91\") " pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.220102 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c535d87-4b67-4e2a-b070-d468468d1f91-config\") pod \"dnsmasq-dns-5bf47b49b7-k28p4\" (UID: \"6c535d87-4b67-4e2a-b070-d468468d1f91\") " pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.220512 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-tpnqc" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.221292 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.222893 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.223402 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c535d87-4b67-4e2a-b070-d468468d1f91-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-k28p4\" (UID: \"6c535d87-4b67-4e2a-b070-d468468d1f91\") " pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.232120 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-tpnqc"] Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.270022 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgj8p\" (UniqueName: \"kubernetes.io/projected/6c535d87-4b67-4e2a-b070-d468468d1f91-kube-api-access-tgj8p\") pod \"dnsmasq-dns-5bf47b49b7-k28p4\" (UID: \"6c535d87-4b67-4e2a-b070-d468468d1f91\") " pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.319484 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0641bc0d-aae7-45cf-b590-cbe9abe2c99e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-tpnqc\" (UID: \"0641bc0d-aae7-45cf-b590-cbe9abe2c99e\") " pod="openstack/ovn-controller-metrics-tpnqc" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.319532 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0641bc0d-aae7-45cf-b590-cbe9abe2c99e-combined-ca-bundle\") pod \"ovn-controller-metrics-tpnqc\" (UID: \"0641bc0d-aae7-45cf-b590-cbe9abe2c99e\") " pod="openstack/ovn-controller-metrics-tpnqc" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.319560 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jglg5\" (UniqueName: \"kubernetes.io/projected/0641bc0d-aae7-45cf-b590-cbe9abe2c99e-kube-api-access-jglg5\") pod 
\"ovn-controller-metrics-tpnqc\" (UID: \"0641bc0d-aae7-45cf-b590-cbe9abe2c99e\") " pod="openstack/ovn-controller-metrics-tpnqc" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.319604 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0641bc0d-aae7-45cf-b590-cbe9abe2c99e-ovs-rundir\") pod \"ovn-controller-metrics-tpnqc\" (UID: \"0641bc0d-aae7-45cf-b590-cbe9abe2c99e\") " pod="openstack/ovn-controller-metrics-tpnqc" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.319663 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0641bc0d-aae7-45cf-b590-cbe9abe2c99e-config\") pod \"ovn-controller-metrics-tpnqc\" (UID: \"0641bc0d-aae7-45cf-b590-cbe9abe2c99e\") " pod="openstack/ovn-controller-metrics-tpnqc" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.319694 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0641bc0d-aae7-45cf-b590-cbe9abe2c99e-ovn-rundir\") pod \"ovn-controller-metrics-tpnqc\" (UID: \"0641bc0d-aae7-45cf-b590-cbe9abe2c99e\") " pod="openstack/ovn-controller-metrics-tpnqc" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.334856 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.374237 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.422740 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0641bc0d-aae7-45cf-b590-cbe9abe2c99e-config\") pod \"ovn-controller-metrics-tpnqc\" (UID: \"0641bc0d-aae7-45cf-b590-cbe9abe2c99e\") " pod="openstack/ovn-controller-metrics-tpnqc" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.422797 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0641bc0d-aae7-45cf-b590-cbe9abe2c99e-ovn-rundir\") pod \"ovn-controller-metrics-tpnqc\" (UID: \"0641bc0d-aae7-45cf-b590-cbe9abe2c99e\") " pod="openstack/ovn-controller-metrics-tpnqc" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.422849 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0641bc0d-aae7-45cf-b590-cbe9abe2c99e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-tpnqc\" (UID: \"0641bc0d-aae7-45cf-b590-cbe9abe2c99e\") " pod="openstack/ovn-controller-metrics-tpnqc" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.422871 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0641bc0d-aae7-45cf-b590-cbe9abe2c99e-combined-ca-bundle\") pod \"ovn-controller-metrics-tpnqc\" (UID: \"0641bc0d-aae7-45cf-b590-cbe9abe2c99e\") " pod="openstack/ovn-controller-metrics-tpnqc" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.422895 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jglg5\" (UniqueName: \"kubernetes.io/projected/0641bc0d-aae7-45cf-b590-cbe9abe2c99e-kube-api-access-jglg5\") pod \"ovn-controller-metrics-tpnqc\" (UID: 
\"0641bc0d-aae7-45cf-b590-cbe9abe2c99e\") " pod="openstack/ovn-controller-metrics-tpnqc" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.422933 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0641bc0d-aae7-45cf-b590-cbe9abe2c99e-ovs-rundir\") pod \"ovn-controller-metrics-tpnqc\" (UID: \"0641bc0d-aae7-45cf-b590-cbe9abe2c99e\") " pod="openstack/ovn-controller-metrics-tpnqc" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.423199 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0641bc0d-aae7-45cf-b590-cbe9abe2c99e-ovs-rundir\") pod \"ovn-controller-metrics-tpnqc\" (UID: \"0641bc0d-aae7-45cf-b590-cbe9abe2c99e\") " pod="openstack/ovn-controller-metrics-tpnqc" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.423817 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0641bc0d-aae7-45cf-b590-cbe9abe2c99e-config\") pod \"ovn-controller-metrics-tpnqc\" (UID: \"0641bc0d-aae7-45cf-b590-cbe9abe2c99e\") " pod="openstack/ovn-controller-metrics-tpnqc" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.425000 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0641bc0d-aae7-45cf-b590-cbe9abe2c99e-ovn-rundir\") pod \"ovn-controller-metrics-tpnqc\" (UID: \"0641bc0d-aae7-45cf-b590-cbe9abe2c99e\") " pod="openstack/ovn-controller-metrics-tpnqc" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.427113 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0641bc0d-aae7-45cf-b590-cbe9abe2c99e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-tpnqc\" (UID: \"0641bc0d-aae7-45cf-b590-cbe9abe2c99e\") " pod="openstack/ovn-controller-metrics-tpnqc" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.448820 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jglg5\" (UniqueName: \"kubernetes.io/projected/0641bc0d-aae7-45cf-b590-cbe9abe2c99e-kube-api-access-jglg5\") pod \"ovn-controller-metrics-tpnqc\" (UID: \"0641bc0d-aae7-45cf-b590-cbe9abe2c99e\") " pod="openstack/ovn-controller-metrics-tpnqc" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.449763 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0641bc0d-aae7-45cf-b590-cbe9abe2c99e-combined-ca-bundle\") pod \"ovn-controller-metrics-tpnqc\" (UID: \"0641bc0d-aae7-45cf-b590-cbe9abe2c99e\") " pod="openstack/ovn-controller-metrics-tpnqc" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.499879 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-ntn7z"] Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.500094 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-ntn7z" podUID="ceb03ee7-fe5e-46b0-8360-9e73297c5c05" containerName="dnsmasq-dns" containerID="cri-o://014ed3559017e7a423f9f1640b466026f48cfe56a2b4393adf7b635c41ebafca" gracePeriod=10 Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.522362 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554648995-zvgvj"] Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.524799 4755 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-zvgvj" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.535008 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.541999 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-tpnqc" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.548139 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-zvgvj"] Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.626391 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-zvgvj\" (UID: \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\") " pod="openstack/dnsmasq-dns-8554648995-zvgvj" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.626436 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-config\") pod \"dnsmasq-dns-8554648995-zvgvj\" (UID: \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\") " pod="openstack/dnsmasq-dns-8554648995-zvgvj" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.626461 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-dns-svc\") pod \"dnsmasq-dns-8554648995-zvgvj\" (UID: \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\") " pod="openstack/dnsmasq-dns-8554648995-zvgvj" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.626522 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mpmm\" (UniqueName: \"kubernetes.io/projected/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-kube-api-access-9mpmm\") pod \"dnsmasq-dns-8554648995-zvgvj\" (UID: \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\") " pod="openstack/dnsmasq-dns-8554648995-zvgvj" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.626619 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-zvgvj\" (UID: \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\") " pod="openstack/dnsmasq-dns-8554648995-zvgvj" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.666660 4755 generic.go:334] "Generic (PLEG): container finished" podID="eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559" containerID="7a7b74a8dd1b104795c29a8a8287ba715d04262a6dacaaa0a78b482e4a30ab16" exitCode=0 Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.666748 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-wdgnn" event={"ID":"eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559","Type":"ContainerDied","Data":"7a7b74a8dd1b104795c29a8a8287ba715d04262a6dacaaa0a78b482e4a30ab16"} Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.696409 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0","Type":"ContainerStarted","Data":"4a3957a72bf06e82bc86fa9c955122f3c5415f1b5e894f7719b95e61b9ae0446"} Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.733150 4755 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=27.328976219 podStartE2EDuration="41.733130167s" podCreationTimestamp="2026-02-02 22:51:57 +0000 UTC" firstStartedPulling="2026-02-02 22:52:16.927620228 +0000 UTC m=+1092.618840554" lastFinishedPulling="2026-02-02 22:52:31.331774176 +0000 UTC m=+1107.022994502" observedRunningTime="2026-02-02 22:52:38.718361927 +0000 UTC m=+1114.409582253" watchObservedRunningTime="2026-02-02 22:52:38.733130167 +0000 UTC m=+1114.424350493" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.750927 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-wdgnn" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.756757 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-zvgvj\" (UID: \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\") " pod="openstack/dnsmasq-dns-8554648995-zvgvj" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.756811 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-config\") pod \"dnsmasq-dns-8554648995-zvgvj\" (UID: \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\") " pod="openstack/dnsmasq-dns-8554648995-zvgvj" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.756844 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-dns-svc\") pod \"dnsmasq-dns-8554648995-zvgvj\" (UID: \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\") " pod="openstack/dnsmasq-dns-8554648995-zvgvj" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.756926 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mpmm\" (UniqueName: \"kubernetes.io/projected/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-kube-api-access-9mpmm\") pod \"dnsmasq-dns-8554648995-zvgvj\" (UID: \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\") " pod="openstack/dnsmasq-dns-8554648995-zvgvj" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.757230 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-zvgvj\" (UID: \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\") " pod="openstack/dnsmasq-dns-8554648995-zvgvj" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.758191 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-zvgvj\" (UID: \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\") " pod="openstack/dnsmasq-dns-8554648995-zvgvj" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.760547 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-zvgvj\" (UID: \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\") " pod="openstack/dnsmasq-dns-8554648995-zvgvj" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.760945 4755 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-config\") pod \"dnsmasq-dns-8554648995-zvgvj\" (UID: \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\") " pod="openstack/dnsmasq-dns-8554648995-zvgvj" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.761717 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-dns-svc\") pod \"dnsmasq-dns-8554648995-zvgvj\" (UID: \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\") " pod="openstack/dnsmasq-dns-8554648995-zvgvj" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.789238 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mpmm\" (UniqueName: \"kubernetes.io/projected/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-kube-api-access-9mpmm\") pod \"dnsmasq-dns-8554648995-zvgvj\" (UID: \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\") " pod="openstack/dnsmasq-dns-8554648995-zvgvj" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.858584 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rkds\" (UniqueName: \"kubernetes.io/projected/eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559-kube-api-access-2rkds\") pod \"eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559\" (UID: \"eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559\") " Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.859130 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559-dns-svc\") pod \"eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559\" (UID: \"eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559\") " Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.859223 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559-config\") pod \"eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559\" (UID: \"eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559\") " Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.863926 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559-kube-api-access-2rkds" (OuterVolumeSpecName: "kube-api-access-2rkds") pod "eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559" (UID: "eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559"). InnerVolumeSpecName "kube-api-access-2rkds". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.882477 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-zvgvj" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.902085 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559-config" (OuterVolumeSpecName: "config") pod "eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559" (UID: "eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.919371 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559" (UID: "eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.961879 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.962079 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rkds\" (UniqueName: \"kubernetes.io/projected/eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559-kube-api-access-2rkds\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:38 crc kubenswrapper[4755]: I0202 22:52:38.962165 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.200465 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-ntn7z" Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.223210 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-k28p4"] Feb 02 22:52:39 crc kubenswrapper[4755]: W0202 22:52:39.225000 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c535d87_4b67_4e2a_b070_d468468d1f91.slice/crio-6d7e707c84a6ceeb2814c9f1de9d52895493836f96a01e1b938411ae67f63cc3 WatchSource:0}: Error finding container 6d7e707c84a6ceeb2814c9f1de9d52895493836f96a01e1b938411ae67f63cc3: Status 404 returned error can't find the container with id 6d7e707c84a6ceeb2814c9f1de9d52895493836f96a01e1b938411ae67f63cc3 Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.241466 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-tpnqc"] Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.267699 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ceb03ee7-fe5e-46b0-8360-9e73297c5c05-config\") pod \"ceb03ee7-fe5e-46b0-8360-9e73297c5c05\" (UID: \"ceb03ee7-fe5e-46b0-8360-9e73297c5c05\") " Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.267914 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ceb03ee7-fe5e-46b0-8360-9e73297c5c05-dns-svc\") pod \"ceb03ee7-fe5e-46b0-8360-9e73297c5c05\" (UID: \"ceb03ee7-fe5e-46b0-8360-9e73297c5c05\") " Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.268042 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hf6kf\" (UniqueName: \"kubernetes.io/projected/ceb03ee7-fe5e-46b0-8360-9e73297c5c05-kube-api-access-hf6kf\") pod \"ceb03ee7-fe5e-46b0-8360-9e73297c5c05\" (UID: \"ceb03ee7-fe5e-46b0-8360-9e73297c5c05\") " Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.281014 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ceb03ee7-fe5e-46b0-8360-9e73297c5c05-kube-api-access-hf6kf" (OuterVolumeSpecName: "kube-api-access-hf6kf") pod "ceb03ee7-fe5e-46b0-8360-9e73297c5c05" (UID: "ceb03ee7-fe5e-46b0-8360-9e73297c5c05"). InnerVolumeSpecName "kube-api-access-hf6kf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.338775 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ceb03ee7-fe5e-46b0-8360-9e73297c5c05-config" (OuterVolumeSpecName: "config") pod "ceb03ee7-fe5e-46b0-8360-9e73297c5c05" (UID: "ceb03ee7-fe5e-46b0-8360-9e73297c5c05"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.345159 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ceb03ee7-fe5e-46b0-8360-9e73297c5c05-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ceb03ee7-fe5e-46b0-8360-9e73297c5c05" (UID: "ceb03ee7-fe5e-46b0-8360-9e73297c5c05"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.369895 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ceb03ee7-fe5e-46b0-8360-9e73297c5c05-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.369925 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hf6kf\" (UniqueName: \"kubernetes.io/projected/ceb03ee7-fe5e-46b0-8360-9e73297c5c05-kube-api-access-hf6kf\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.369936 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ceb03ee7-fe5e-46b0-8360-9e73297c5c05-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.395201 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-zvgvj"] Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.705192 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-tpnqc" event={"ID":"0641bc0d-aae7-45cf-b590-cbe9abe2c99e","Type":"ContainerStarted","Data":"40cee571b4fd9768df5970ba15789969db924764908a75d57ae32159c018f941"} Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.705539 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-tpnqc" event={"ID":"0641bc0d-aae7-45cf-b590-cbe9abe2c99e","Type":"ContainerStarted","Data":"21bd4fb83b94fa84b4d85dfc50c89763433b153760d118853161bcf231823d6b"} Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.706865 4755 generic.go:334] "Generic (PLEG): container finished" podID="f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f" containerID="4deb7cbccc97d4bdc9eb1f5fabeafc7c527958dd1ca8ae1b32aa24c3184a3da5" exitCode=0 Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.706926 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-zvgvj" event={"ID":"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f","Type":"ContainerDied","Data":"4deb7cbccc97d4bdc9eb1f5fabeafc7c527958dd1ca8ae1b32aa24c3184a3da5"} Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.706951 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-zvgvj" event={"ID":"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f","Type":"ContainerStarted","Data":"08eb59f52b271bf040121e371d297dcc3f907647592dcb1f90a6f6717d686f52"} Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.708930 4755 generic.go:334] "Generic (PLEG): container finished" podID="6c535d87-4b67-4e2a-b070-d468468d1f91" 
containerID="5488e6780372dd487e6897b3cb1f6abd7db9f681097b40a74b8602a24f6204eb" exitCode=0 Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.709095 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4" event={"ID":"6c535d87-4b67-4e2a-b070-d468468d1f91","Type":"ContainerDied","Data":"5488e6780372dd487e6897b3cb1f6abd7db9f681097b40a74b8602a24f6204eb"} Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.709127 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4" event={"ID":"6c535d87-4b67-4e2a-b070-d468468d1f91","Type":"ContainerStarted","Data":"6d7e707c84a6ceeb2814c9f1de9d52895493836f96a01e1b938411ae67f63cc3"} Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.714776 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-wdgnn" event={"ID":"eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559","Type":"ContainerDied","Data":"39688be6d753f7ac03abeff7feee541e52d8daa80aaed442ff301644821da5e8"} Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.714824 4755 scope.go:117] "RemoveContainer" containerID="7a7b74a8dd1b104795c29a8a8287ba715d04262a6dacaaa0a78b482e4a30ab16" Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.714955 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-wdgnn" Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.727240 4755 generic.go:334] "Generic (PLEG): container finished" podID="ceb03ee7-fe5e-46b0-8360-9e73297c5c05" containerID="014ed3559017e7a423f9f1640b466026f48cfe56a2b4393adf7b635c41ebafca" exitCode=0 Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.729410 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-ntn7z" Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.729872 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-ntn7z" event={"ID":"ceb03ee7-fe5e-46b0-8360-9e73297c5c05","Type":"ContainerDied","Data":"014ed3559017e7a423f9f1640b466026f48cfe56a2b4393adf7b635c41ebafca"} Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.729973 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-ntn7z" event={"ID":"ceb03ee7-fe5e-46b0-8360-9e73297c5c05","Type":"ContainerDied","Data":"1794674d256c05e879346dc3fdc646dd0fcec7719e65d53f91cc74a85ee145db"} Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.747552 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-tpnqc" podStartSLOduration=1.747528266 podStartE2EDuration="1.747528266s" podCreationTimestamp="2026-02-02 22:52:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:52:39.72281373 +0000 UTC m=+1115.414034056" watchObservedRunningTime="2026-02-02 22:52:39.747528266 +0000 UTC m=+1115.438748592" Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.874864 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-ntn7z"] Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.880622 4755 scope.go:117] "RemoveContainer" containerID="ec02cb2a91681d5bd180b947b13814d32870fa7a3313bc0816d8853116b89b95" Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.884802 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-ntn7z"] Feb 02 
22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.898716 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-wdgnn"] Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.908445 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-wdgnn"] Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.919054 4755 scope.go:117] "RemoveContainer" containerID="014ed3559017e7a423f9f1640b466026f48cfe56a2b4393adf7b635c41ebafca" Feb 02 22:52:39 crc kubenswrapper[4755]: I0202 22:52:39.960525 4755 scope.go:117] "RemoveContainer" containerID="ef43c1445858a7d2ac02aa8e77740632ef9cbc1f4bed36eb617b85f087533d39" Feb 02 22:52:40 crc kubenswrapper[4755]: I0202 22:52:40.000604 4755 scope.go:117] "RemoveContainer" containerID="014ed3559017e7a423f9f1640b466026f48cfe56a2b4393adf7b635c41ebafca" Feb 02 22:52:40 crc kubenswrapper[4755]: E0202 22:52:40.002287 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"014ed3559017e7a423f9f1640b466026f48cfe56a2b4393adf7b635c41ebafca\": container with ID starting with 014ed3559017e7a423f9f1640b466026f48cfe56a2b4393adf7b635c41ebafca not found: ID does not exist" containerID="014ed3559017e7a423f9f1640b466026f48cfe56a2b4393adf7b635c41ebafca" Feb 02 22:52:40 crc kubenswrapper[4755]: I0202 22:52:40.002335 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"014ed3559017e7a423f9f1640b466026f48cfe56a2b4393adf7b635c41ebafca"} err="failed to get container status \"014ed3559017e7a423f9f1640b466026f48cfe56a2b4393adf7b635c41ebafca\": rpc error: code = NotFound desc = could not find container \"014ed3559017e7a423f9f1640b466026f48cfe56a2b4393adf7b635c41ebafca\": container with ID starting with 014ed3559017e7a423f9f1640b466026f48cfe56a2b4393adf7b635c41ebafca not found: ID does not exist" Feb 02 22:52:40 crc kubenswrapper[4755]: I0202 22:52:40.002371 4755 scope.go:117] "RemoveContainer" containerID="ef43c1445858a7d2ac02aa8e77740632ef9cbc1f4bed36eb617b85f087533d39" Feb 02 22:52:40 crc kubenswrapper[4755]: E0202 22:52:40.003234 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef43c1445858a7d2ac02aa8e77740632ef9cbc1f4bed36eb617b85f087533d39\": container with ID starting with ef43c1445858a7d2ac02aa8e77740632ef9cbc1f4bed36eb617b85f087533d39 not found: ID does not exist" containerID="ef43c1445858a7d2ac02aa8e77740632ef9cbc1f4bed36eb617b85f087533d39" Feb 02 22:52:40 crc kubenswrapper[4755]: I0202 22:52:40.003258 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef43c1445858a7d2ac02aa8e77740632ef9cbc1f4bed36eb617b85f087533d39"} err="failed to get container status \"ef43c1445858a7d2ac02aa8e77740632ef9cbc1f4bed36eb617b85f087533d39\": rpc error: code = NotFound desc = could not find container \"ef43c1445858a7d2ac02aa8e77740632ef9cbc1f4bed36eb617b85f087533d39\": container with ID starting with ef43c1445858a7d2ac02aa8e77740632ef9cbc1f4bed36eb617b85f087533d39 not found: ID does not exist" Feb 02 22:52:40 crc kubenswrapper[4755]: I0202 22:52:40.301587 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Feb 02 22:52:40 crc kubenswrapper[4755]: I0202 22:52:40.344642 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Feb 02 22:52:40 crc kubenswrapper[4755]: I0202 
Feb 02 22:52:40 crc kubenswrapper[4755]: I0202 22:52:40.345029 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0"
Feb 02 22:52:40 crc kubenswrapper[4755]: I0202 22:52:40.735320 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4" event={"ID":"6c535d87-4b67-4e2a-b070-d468468d1f91","Type":"ContainerStarted","Data":"43a9f46510c4e0d11f1811cb11679204fa6acdbf45fca842cc1348f9aec43177"}
Feb 02 22:52:40 crc kubenswrapper[4755]: I0202 22:52:40.736243 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4"
Feb 02 22:52:40 crc kubenswrapper[4755]: I0202 22:52:40.739923 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-zvgvj" event={"ID":"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f","Type":"ContainerStarted","Data":"f8158f455f87254a92235d23122db7df788cdf0d75d40d9041dbd770736a9adc"}
Feb 02 22:52:40 crc kubenswrapper[4755]: I0202 22:52:40.739960 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-zvgvj"
Feb 02 22:52:40 crc kubenswrapper[4755]: I0202 22:52:40.764809 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4" podStartSLOduration=3.764791877 podStartE2EDuration="3.764791877s" podCreationTimestamp="2026-02-02 22:52:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:52:40.760803116 +0000 UTC m=+1116.452023452" watchObservedRunningTime="2026-02-02 22:52:40.764791877 +0000 UTC m=+1116.456012203"
Feb 02 22:52:40 crc kubenswrapper[4755]: I0202 22:52:40.784060 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554648995-zvgvj" podStartSLOduration=2.784041831 podStartE2EDuration="2.784041831s" podCreationTimestamp="2026-02-02 22:52:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:52:40.783462635 +0000 UTC m=+1116.474682971" watchObservedRunningTime="2026-02-02 22:52:40.784041831 +0000 UTC m=+1116.475262157"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.078372 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ceb03ee7-fe5e-46b0-8360-9e73297c5c05" path="/var/lib/kubelet/pods/ceb03ee7-fe5e-46b0-8360-9e73297c5c05/volumes"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.079039 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559" path="/var/lib/kubelet/pods/eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559/volumes"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.265506 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.420940 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Feb 02 22:52:41 crc kubenswrapper[4755]: E0202 22:52:41.421459 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559" containerName="init"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.421531 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559" containerName="init"
Feb 02 22:52:41 crc kubenswrapper[4755]: E0202 22:52:41.421587 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559" containerName="dnsmasq-dns"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.421635 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559" containerName="dnsmasq-dns"
Feb 02 22:52:41 crc kubenswrapper[4755]: E0202 22:52:41.421695 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ceb03ee7-fe5e-46b0-8360-9e73297c5c05" containerName="dnsmasq-dns"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.421779 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ceb03ee7-fe5e-46b0-8360-9e73297c5c05" containerName="dnsmasq-dns"
Feb 02 22:52:41 crc kubenswrapper[4755]: E0202 22:52:41.421851 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ceb03ee7-fe5e-46b0-8360-9e73297c5c05" containerName="init"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.421910 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ceb03ee7-fe5e-46b0-8360-9e73297c5c05" containerName="init"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.422136 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ceb03ee7-fe5e-46b0-8360-9e73297c5c05" containerName="dnsmasq-dns"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.422208 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb8e08d8-9a36-46df-8a9d-5ca2ea1a7559" containerName="dnsmasq-dns"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.423160 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.425720 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.425946 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.426076 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.428577 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-rwhm8"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.451768 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.516681 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/03b1c517-cae2-45bd-a887-fe41ad462721-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"03b1c517-cae2-45bd-a887-fe41ad462721\") " pod="openstack/ovn-northd-0"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.516751 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/03b1c517-cae2-45bd-a887-fe41ad462721-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"03b1c517-cae2-45bd-a887-fe41ad462721\") " pod="openstack/ovn-northd-0"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.516790 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cc7r8\" (UniqueName: \"kubernetes.io/projected/03b1c517-cae2-45bd-a887-fe41ad462721-kube-api-access-cc7r8\") pod \"ovn-northd-0\" (UID: \"03b1c517-cae2-45bd-a887-fe41ad462721\") " pod="openstack/ovn-northd-0"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.516857 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03b1c517-cae2-45bd-a887-fe41ad462721-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"03b1c517-cae2-45bd-a887-fe41ad462721\") " pod="openstack/ovn-northd-0"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.516881 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/03b1c517-cae2-45bd-a887-fe41ad462721-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"03b1c517-cae2-45bd-a887-fe41ad462721\") " pod="openstack/ovn-northd-0"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.516999 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03b1c517-cae2-45bd-a887-fe41ad462721-scripts\") pod \"ovn-northd-0\" (UID: \"03b1c517-cae2-45bd-a887-fe41ad462721\") " pod="openstack/ovn-northd-0"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.517031 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03b1c517-cae2-45bd-a887-fe41ad462721-config\") pod \"ovn-northd-0\" (UID: \"03b1c517-cae2-45bd-a887-fe41ad462721\") " pod="openstack/ovn-northd-0"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.618101 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03b1c517-cae2-45bd-a887-fe41ad462721-scripts\") pod \"ovn-northd-0\" (UID: \"03b1c517-cae2-45bd-a887-fe41ad462721\") " pod="openstack/ovn-northd-0"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.618147 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03b1c517-cae2-45bd-a887-fe41ad462721-config\") pod \"ovn-northd-0\" (UID: \"03b1c517-cae2-45bd-a887-fe41ad462721\") " pod="openstack/ovn-northd-0"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.618202 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/03b1c517-cae2-45bd-a887-fe41ad462721-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"03b1c517-cae2-45bd-a887-fe41ad462721\") " pod="openstack/ovn-northd-0"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.618227 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/03b1c517-cae2-45bd-a887-fe41ad462721-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"03b1c517-cae2-45bd-a887-fe41ad462721\") " pod="openstack/ovn-northd-0"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.618254 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cc7r8\" (UniqueName: \"kubernetes.io/projected/03b1c517-cae2-45bd-a887-fe41ad462721-kube-api-access-cc7r8\") pod \"ovn-northd-0\" (UID: \"03b1c517-cae2-45bd-a887-fe41ad462721\") " pod="openstack/ovn-northd-0"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.618277 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03b1c517-cae2-45bd-a887-fe41ad462721-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"03b1c517-cae2-45bd-a887-fe41ad462721\") " pod="openstack/ovn-northd-0"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.618299 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/03b1c517-cae2-45bd-a887-fe41ad462721-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"03b1c517-cae2-45bd-a887-fe41ad462721\") " pod="openstack/ovn-northd-0"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.618784 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/03b1c517-cae2-45bd-a887-fe41ad462721-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"03b1c517-cae2-45bd-a887-fe41ad462721\") " pod="openstack/ovn-northd-0"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.619102 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03b1c517-cae2-45bd-a887-fe41ad462721-scripts\") pod \"ovn-northd-0\" (UID: \"03b1c517-cae2-45bd-a887-fe41ad462721\") " pod="openstack/ovn-northd-0"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.620079 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03b1c517-cae2-45bd-a887-fe41ad462721-config\") pod \"ovn-northd-0\" (UID: \"03b1c517-cae2-45bd-a887-fe41ad462721\") " pod="openstack/ovn-northd-0"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.626235 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/03b1c517-cae2-45bd-a887-fe41ad462721-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"03b1c517-cae2-45bd-a887-fe41ad462721\") " pod="openstack/ovn-northd-0"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.626398 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/03b1c517-cae2-45bd-a887-fe41ad462721-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"03b1c517-cae2-45bd-a887-fe41ad462721\") " pod="openstack/ovn-northd-0"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.626401 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03b1c517-cae2-45bd-a887-fe41ad462721-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"03b1c517-cae2-45bd-a887-fe41ad462721\") " pod="openstack/ovn-northd-0"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.643706 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cc7r8\" (UniqueName: \"kubernetes.io/projected/03b1c517-cae2-45bd-a887-fe41ad462721-kube-api-access-cc7r8\") pod \"ovn-northd-0\" (UID: \"03b1c517-cae2-45bd-a887-fe41ad462721\") " pod="openstack/ovn-northd-0"
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.747341 4755 generic.go:334] "Generic (PLEG): container finished" podID="9664cb92-62aa-4d52-9936-96c48dc7c8d2" containerID="a39c8e006e39019dedd36cc28cb8cb8d15086fdf7afe4ae2e3e1c48206397956" exitCode=0
Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.747412 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"9664cb92-62aa-4d52-9936-96c48dc7c8d2","Type":"ContainerDied","Data":"a39c8e006e39019dedd36cc28cb8cb8d15086fdf7afe4ae2e3e1c48206397956"}
generic.go:334] "Generic (PLEG): container finished" podID="dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" containerID="e115599dcb652a384d5f72adc533a042b0ef4ae64e3fc1015e64d204f93e6195" exitCode=0 Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.749153 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc","Type":"ContainerDied","Data":"e115599dcb652a384d5f72adc533a042b0ef4ae64e3fc1015e64d204f93e6195"} Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.754326 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.932819 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-k28p4"] Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.965902 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-fzxs2"] Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.970688 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2" Feb 02 22:52:41 crc kubenswrapper[4755]: I0202 22:52:41.976159 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Feb 02 22:52:42 crc kubenswrapper[4755]: I0202 22:52:42.005577 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-fzxs2"] Feb 02 22:52:42 crc kubenswrapper[4755]: I0202 22:52:42.030025 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vswfw\" (UniqueName: \"kubernetes.io/projected/6cf5da5b-b25b-4a10-a05c-a82300fe094b-kube-api-access-vswfw\") pod \"dnsmasq-dns-b8fbc5445-fzxs2\" (UID: \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\") " pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2" Feb 02 22:52:42 crc kubenswrapper[4755]: I0202 22:52:42.030065 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-fzxs2\" (UID: \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\") " pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2" Feb 02 22:52:42 crc kubenswrapper[4755]: I0202 22:52:42.030123 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-fzxs2\" (UID: \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\") " pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2" Feb 02 22:52:42 crc kubenswrapper[4755]: I0202 22:52:42.030159 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-config\") pod \"dnsmasq-dns-b8fbc5445-fzxs2\" (UID: \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\") " pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2" Feb 02 22:52:42 crc kubenswrapper[4755]: I0202 22:52:42.030210 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-fzxs2\" (UID: \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\") " pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2" Feb 02 22:52:42 crc 
Feb 02 22:52:42 crc kubenswrapper[4755]: E0202 22:52:42.078209 4755 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.36:36978->38.102.83.36:42873: write tcp 38.102.83.36:36978->38.102.83.36:42873: write: broken pipe
Feb 02 22:52:42 crc kubenswrapper[4755]: I0202 22:52:42.132073 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-fzxs2\" (UID: \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\") " pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2"
Feb 02 22:52:42 crc kubenswrapper[4755]: I0202 22:52:42.132392 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-config\") pod \"dnsmasq-dns-b8fbc5445-fzxs2\" (UID: \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\") " pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2"
Feb 02 22:52:42 crc kubenswrapper[4755]: I0202 22:52:42.132441 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-fzxs2\" (UID: \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\") " pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2"
Feb 02 22:52:42 crc kubenswrapper[4755]: I0202 22:52:42.132501 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vswfw\" (UniqueName: \"kubernetes.io/projected/6cf5da5b-b25b-4a10-a05c-a82300fe094b-kube-api-access-vswfw\") pod \"dnsmasq-dns-b8fbc5445-fzxs2\" (UID: \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\") " pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2"
Feb 02 22:52:42 crc kubenswrapper[4755]: I0202 22:52:42.132525 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-fzxs2\" (UID: \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\") " pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2"
Feb 02 22:52:42 crc kubenswrapper[4755]: I0202 22:52:42.133397 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-fzxs2\" (UID: \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\") " pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2"
Feb 02 22:52:42 crc kubenswrapper[4755]: I0202 22:52:42.133649 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-fzxs2\" (UID: \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\") " pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2"
Feb 02 22:52:42 crc kubenswrapper[4755]: I0202 22:52:42.133858 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-config\") pod \"dnsmasq-dns-b8fbc5445-fzxs2\" (UID: \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\") " pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2"
Feb 02 22:52:42 crc kubenswrapper[4755]: I0202 22:52:42.134413 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-fzxs2\" (UID: \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\") " pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2"
Feb 02 22:52:42 crc kubenswrapper[4755]: I0202 22:52:42.160317 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vswfw\" (UniqueName: \"kubernetes.io/projected/6cf5da5b-b25b-4a10-a05c-a82300fe094b-kube-api-access-vswfw\") pod \"dnsmasq-dns-b8fbc5445-fzxs2\" (UID: \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\") " pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2"
Feb 02 22:52:42 crc kubenswrapper[4755]: I0202 22:52:42.305614 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2"
Feb 02 22:52:42 crc kubenswrapper[4755]: I0202 22:52:42.385249 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Feb 02 22:52:42 crc kubenswrapper[4755]: W0202 22:52:42.396903 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod03b1c517_cae2_45bd_a887_fe41ad462721.slice/crio-4c3d03ca1a1890ee95fc57e859d90cdcbf652c392968985f25206a03a499dec5 WatchSource:0}: Error finding container 4c3d03ca1a1890ee95fc57e859d90cdcbf652c392968985f25206a03a499dec5: Status 404 returned error can't find the container with id 4c3d03ca1a1890ee95fc57e859d90cdcbf652c392968985f25206a03a499dec5
Feb 02 22:52:42 crc kubenswrapper[4755]: I0202 22:52:42.652748 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0"
Feb 02 22:52:42 crc kubenswrapper[4755]: I0202 22:52:42.752656 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-fzxs2"]
Feb 02 22:52:42 crc kubenswrapper[4755]: I0202 22:52:42.807298 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4" podUID="6c535d87-4b67-4e2a-b070-d468468d1f91" containerName="dnsmasq-dns" containerID="cri-o://43a9f46510c4e0d11f1811cb11679204fa6acdbf45fca842cc1348f9aec43177" gracePeriod=10
Feb 02 22:52:42 crc kubenswrapper[4755]: I0202 22:52:42.807445 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"03b1c517-cae2-45bd-a887-fe41ad462721","Type":"ContainerStarted","Data":"4c3d03ca1a1890ee95fc57e859d90cdcbf652c392968985f25206a03a499dec5"}
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.004154 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.104947 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"]
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.111120 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.113952 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.114069 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-hvn4r"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.114111 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.114229 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.128112 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.159017 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x28mg\" (UniqueName: \"kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-kube-api-access-x28mg\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.159103 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-etc-swift\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.159142 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/50a165a2-aeeb-4f83-9af3-a33f76b34a39-lock\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.159202 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-53c2b9ad-6483-4f23-83fd-bfeb4fdff125\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-53c2b9ad-6483-4f23-83fd-bfeb4fdff125\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.159291 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/50a165a2-aeeb-4f83-9af3-a33f76b34a39-cache\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.159313 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50a165a2-aeeb-4f83-9af3-a33f76b34a39-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.260555 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/50a165a2-aeeb-4f83-9af3-a33f76b34a39-cache\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.261307 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50a165a2-aeeb-4f83-9af3-a33f76b34a39-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.261270 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/50a165a2-aeeb-4f83-9af3-a33f76b34a39-cache\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.262257 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x28mg\" (UniqueName: \"kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-kube-api-access-x28mg\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.262343 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-etc-swift\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.262370 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/50a165a2-aeeb-4f83-9af3-a33f76b34a39-lock\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.262428 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-53c2b9ad-6483-4f23-83fd-bfeb4fdff125\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-53c2b9ad-6483-4f23-83fd-bfeb4fdff125\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0"
Feb 02 22:52:43 crc kubenswrapper[4755]: E0202 22:52:43.262967 4755 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Feb 02 22:52:43 crc kubenswrapper[4755]: E0202 22:52:43.263007 4755 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Feb 02 22:52:43 crc kubenswrapper[4755]: E0202 22:52:43.263068 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-etc-swift podName:50a165a2-aeeb-4f83-9af3-a33f76b34a39 nodeName:}" failed. No retries permitted until 2026-02-02 22:52:43.763044358 +0000 UTC m=+1119.454264684 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-etc-swift") pod "swift-storage-0" (UID: "50a165a2-aeeb-4f83-9af3-a33f76b34a39") : configmap "swift-ring-files" not found
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.263186 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/50a165a2-aeeb-4f83-9af3-a33f76b34a39-lock\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.266603 4755 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.266646 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-53c2b9ad-6483-4f23-83fd-bfeb4fdff125\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-53c2b9ad-6483-4f23-83fd-bfeb4fdff125\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/9158ff1a42f5c2a1d9ff32fdd2e81482f6859ddaa3f19590493eb72bdcc50423/globalmount\"" pod="openstack/swift-storage-0"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.268059 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50a165a2-aeeb-4f83-9af3-a33f76b34a39-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.289555 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x28mg\" (UniqueName: \"kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-kube-api-access-x28mg\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.307990 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-53c2b9ad-6483-4f23-83fd-bfeb4fdff125\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-53c2b9ad-6483-4f23-83fd-bfeb4fdff125\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.629208 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-6v7kn"]
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.630357 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.634874 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.634955 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.635053 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.643636 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-6v7kn"]
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.666644 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/18147166-0270-4289-a33b-8b5524919631-etc-swift\") pod \"swift-ring-rebalance-6v7kn\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") " pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.666692 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18147166-0270-4289-a33b-8b5524919631-combined-ca-bundle\") pod \"swift-ring-rebalance-6v7kn\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") " pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.666715 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/18147166-0270-4289-a33b-8b5524919631-ring-data-devices\") pod \"swift-ring-rebalance-6v7kn\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") " pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.666774 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/18147166-0270-4289-a33b-8b5524919631-dispersionconf\") pod \"swift-ring-rebalance-6v7kn\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") " pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.667015 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/18147166-0270-4289-a33b-8b5524919631-scripts\") pod \"swift-ring-rebalance-6v7kn\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") " pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.667083 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/18147166-0270-4289-a33b-8b5524919631-swiftconf\") pod \"swift-ring-rebalance-6v7kn\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") " pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.667244 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fln9f\" (UniqueName: \"kubernetes.io/projected/18147166-0270-4289-a33b-8b5524919631-kube-api-access-fln9f\") pod \"swift-ring-rebalance-6v7kn\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") " pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.670802 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-6v7kn"]
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.688612 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-g6jdb"]
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.689862 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-g6jdb"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.695275 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-g6jdb"]
Feb 02 22:52:43 crc kubenswrapper[4755]: E0202 22:52:43.695713 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-fln9f ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/swift-ring-rebalance-6v7kn" podUID="18147166-0270-4289-a33b-8b5524919631"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.768743 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/18147166-0270-4289-a33b-8b5524919631-swiftconf\") pod \"swift-ring-rebalance-6v7kn\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") " pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.769086 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/980ec437-3885-426d-9b2c-1773951f8c86-combined-ca-bundle\") pod \"swift-ring-rebalance-g6jdb\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " pod="openstack/swift-ring-rebalance-g6jdb"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.769117 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-etc-swift\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.769142 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/980ec437-3885-426d-9b2c-1773951f8c86-etc-swift\") pod \"swift-ring-rebalance-g6jdb\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " pod="openstack/swift-ring-rebalance-g6jdb"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.769166 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fln9f\" (UniqueName: \"kubernetes.io/projected/18147166-0270-4289-a33b-8b5524919631-kube-api-access-fln9f\") pod \"swift-ring-rebalance-6v7kn\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") " pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.769183 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nx5xv\" (UniqueName: \"kubernetes.io/projected/980ec437-3885-426d-9b2c-1773951f8c86-kube-api-access-nx5xv\") pod \"swift-ring-rebalance-g6jdb\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " pod="openstack/swift-ring-rebalance-g6jdb"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.769206 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/18147166-0270-4289-a33b-8b5524919631-etc-swift\") pod \"swift-ring-rebalance-6v7kn\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") " pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.769239 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18147166-0270-4289-a33b-8b5524919631-combined-ca-bundle\") pod \"swift-ring-rebalance-6v7kn\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") " pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.769266 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/18147166-0270-4289-a33b-8b5524919631-ring-data-devices\") pod \"swift-ring-rebalance-6v7kn\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") " pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.769286 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/980ec437-3885-426d-9b2c-1773951f8c86-ring-data-devices\") pod \"swift-ring-rebalance-g6jdb\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " pod="openstack/swift-ring-rebalance-g6jdb"
Feb 02 22:52:43 crc kubenswrapper[4755]: E0202 22:52:43.769414 4755 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Feb 02 22:52:43 crc kubenswrapper[4755]: E0202 22:52:43.769438 4755 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Feb 02 22:52:43 crc kubenswrapper[4755]: E0202 22:52:43.769836 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-etc-swift podName:50a165a2-aeeb-4f83-9af3-a33f76b34a39 nodeName:}" failed. No retries permitted until 2026-02-02 22:52:44.769811866 +0000 UTC m=+1120.461032192 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-etc-swift") pod "swift-storage-0" (UID: "50a165a2-aeeb-4f83-9af3-a33f76b34a39") : configmap "swift-ring-files" not found
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.770529 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/18147166-0270-4289-a33b-8b5524919631-ring-data-devices\") pod \"swift-ring-rebalance-6v7kn\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") " pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.770776 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/18147166-0270-4289-a33b-8b5524919631-etc-swift\") pod \"swift-ring-rebalance-6v7kn\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") " pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.770878 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/980ec437-3885-426d-9b2c-1773951f8c86-swiftconf\") pod \"swift-ring-rebalance-g6jdb\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " pod="openstack/swift-ring-rebalance-g6jdb"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.770943 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/18147166-0270-4289-a33b-8b5524919631-dispersionconf\") pod \"swift-ring-rebalance-6v7kn\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") " pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.770987 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/18147166-0270-4289-a33b-8b5524919631-scripts\") pod \"swift-ring-rebalance-6v7kn\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") " pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.771024 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/980ec437-3885-426d-9b2c-1773951f8c86-dispersionconf\") pod \"swift-ring-rebalance-g6jdb\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " pod="openstack/swift-ring-rebalance-g6jdb"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.771060 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/980ec437-3885-426d-9b2c-1773951f8c86-scripts\") pod \"swift-ring-rebalance-g6jdb\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " pod="openstack/swift-ring-rebalance-g6jdb"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.771811 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/18147166-0270-4289-a33b-8b5524919631-scripts\") pod \"swift-ring-rebalance-6v7kn\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") " pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.777491 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/18147166-0270-4289-a33b-8b5524919631-swiftconf\") pod \"swift-ring-rebalance-6v7kn\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") " pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.778232 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/18147166-0270-4289-a33b-8b5524919631-dispersionconf\") pod \"swift-ring-rebalance-6v7kn\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") " pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.780593 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18147166-0270-4289-a33b-8b5524919631-combined-ca-bundle\") pod \"swift-ring-rebalance-6v7kn\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") " pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.790484 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fln9f\" (UniqueName: \"kubernetes.io/projected/18147166-0270-4289-a33b-8b5524919631-kube-api-access-fln9f\") pod \"swift-ring-rebalance-6v7kn\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") " pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.820690 4755 generic.go:334] "Generic (PLEG): container finished" podID="6c535d87-4b67-4e2a-b070-d468468d1f91" containerID="43a9f46510c4e0d11f1811cb11679204fa6acdbf45fca842cc1348f9aec43177" exitCode=0
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.820787 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4" event={"ID":"6c535d87-4b67-4e2a-b070-d468468d1f91","Type":"ContainerDied","Data":"43a9f46510c4e0d11f1811cb11679204fa6acdbf45fca842cc1348f9aec43177"}
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.825182 4755 generic.go:334] "Generic (PLEG): container finished" podID="6cf5da5b-b25b-4a10-a05c-a82300fe094b" containerID="8a6be0e50f6792220f047fa24b32e5af2433a25e2da576096be7de439bc8dc2f" exitCode=0
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.825241 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.825323 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2" event={"ID":"6cf5da5b-b25b-4a10-a05c-a82300fe094b","Type":"ContainerDied","Data":"8a6be0e50f6792220f047fa24b32e5af2433a25e2da576096be7de439bc8dc2f"}
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.825372 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2" event={"ID":"6cf5da5b-b25b-4a10-a05c-a82300fe094b","Type":"ContainerStarted","Data":"06a010e0929c73dd3c961de1e06f37a94fdcc73eef730202f595f38165615e78"}
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.872075 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nx5xv\" (UniqueName: \"kubernetes.io/projected/980ec437-3885-426d-9b2c-1773951f8c86-kube-api-access-nx5xv\") pod \"swift-ring-rebalance-g6jdb\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " pod="openstack/swift-ring-rebalance-g6jdb"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.872149 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/980ec437-3885-426d-9b2c-1773951f8c86-ring-data-devices\") pod \"swift-ring-rebalance-g6jdb\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " pod="openstack/swift-ring-rebalance-g6jdb"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.872180 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/980ec437-3885-426d-9b2c-1773951f8c86-swiftconf\") pod \"swift-ring-rebalance-g6jdb\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " pod="openstack/swift-ring-rebalance-g6jdb"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.872240 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/980ec437-3885-426d-9b2c-1773951f8c86-dispersionconf\") pod \"swift-ring-rebalance-g6jdb\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " pod="openstack/swift-ring-rebalance-g6jdb"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.872270 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/980ec437-3885-426d-9b2c-1773951f8c86-scripts\") pod \"swift-ring-rebalance-g6jdb\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " pod="openstack/swift-ring-rebalance-g6jdb"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.872320 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/980ec437-3885-426d-9b2c-1773951f8c86-combined-ca-bundle\") pod \"swift-ring-rebalance-g6jdb\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " pod="openstack/swift-ring-rebalance-g6jdb"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.872350 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/980ec437-3885-426d-9b2c-1773951f8c86-etc-swift\") pod \"swift-ring-rebalance-g6jdb\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " pod="openstack/swift-ring-rebalance-g6jdb"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.872702 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/980ec437-3885-426d-9b2c-1773951f8c86-etc-swift\") pod \"swift-ring-rebalance-g6jdb\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " pod="openstack/swift-ring-rebalance-g6jdb"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.873622 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/980ec437-3885-426d-9b2c-1773951f8c86-ring-data-devices\") pod \"swift-ring-rebalance-g6jdb\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " pod="openstack/swift-ring-rebalance-g6jdb"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.874453 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/980ec437-3885-426d-9b2c-1773951f8c86-scripts\") pod \"swift-ring-rebalance-g6jdb\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " pod="openstack/swift-ring-rebalance-g6jdb"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.877577 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/980ec437-3885-426d-9b2c-1773951f8c86-dispersionconf\") pod \"swift-ring-rebalance-g6jdb\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " pod="openstack/swift-ring-rebalance-g6jdb"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.878146 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/980ec437-3885-426d-9b2c-1773951f8c86-combined-ca-bundle\") pod \"swift-ring-rebalance-g6jdb\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " pod="openstack/swift-ring-rebalance-g6jdb"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.887232 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/980ec437-3885-426d-9b2c-1773951f8c86-swiftconf\") pod \"swift-ring-rebalance-g6jdb\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " pod="openstack/swift-ring-rebalance-g6jdb"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.900437 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nx5xv\" (UniqueName: \"kubernetes.io/projected/980ec437-3885-426d-9b2c-1773951f8c86-kube-api-access-nx5xv\") pod \"swift-ring-rebalance-g6jdb\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " pod="openstack/swift-ring-rebalance-g6jdb"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.930350 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.932799 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-6v7kn"
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.973240 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18147166-0270-4289-a33b-8b5524919631-combined-ca-bundle\") pod \"18147166-0270-4289-a33b-8b5524919631\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") "
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.973303 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tgj8p\" (UniqueName: \"kubernetes.io/projected/6c535d87-4b67-4e2a-b070-d468468d1f91-kube-api-access-tgj8p\") pod \"6c535d87-4b67-4e2a-b070-d468468d1f91\" (UID: \"6c535d87-4b67-4e2a-b070-d468468d1f91\") "
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.973346 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6c535d87-4b67-4e2a-b070-d468468d1f91-ovsdbserver-nb\") pod \"6c535d87-4b67-4e2a-b070-d468468d1f91\" (UID: \"6c535d87-4b67-4e2a-b070-d468468d1f91\") "
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.973377 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c535d87-4b67-4e2a-b070-d468468d1f91-dns-svc\") pod \"6c535d87-4b67-4e2a-b070-d468468d1f91\" (UID: \"6c535d87-4b67-4e2a-b070-d468468d1f91\") "
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.973454 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c535d87-4b67-4e2a-b070-d468468d1f91-config\") pod \"6c535d87-4b67-4e2a-b070-d468468d1f91\" (UID: \"6c535d87-4b67-4e2a-b070-d468468d1f91\") "
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.973562 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/18147166-0270-4289-a33b-8b5524919631-ring-data-devices\") pod \"18147166-0270-4289-a33b-8b5524919631\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") "
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.973587 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/18147166-0270-4289-a33b-8b5524919631-swiftconf\") pod \"18147166-0270-4289-a33b-8b5524919631\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") "
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.973609 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/18147166-0270-4289-a33b-8b5524919631-scripts\") pod \"18147166-0270-4289-a33b-8b5524919631\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") "
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.973652 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/18147166-0270-4289-a33b-8b5524919631-dispersionconf\") pod \"18147166-0270-4289-a33b-8b5524919631\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") "
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.973707 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/18147166-0270-4289-a33b-8b5524919631-etc-swift\") pod \"18147166-0270-4289-a33b-8b5524919631\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") "
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.973771 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fln9f\" (UniqueName: \"kubernetes.io/projected/18147166-0270-4289-a33b-8b5524919631-kube-api-access-fln9f\") pod \"18147166-0270-4289-a33b-8b5524919631\" (UID: \"18147166-0270-4289-a33b-8b5524919631\") "
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.974073 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18147166-0270-4289-a33b-8b5524919631-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "18147166-0270-4289-a33b-8b5524919631" (UID: "18147166-0270-4289-a33b-8b5524919631"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.974153 4755 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/18147166-0270-4289-a33b-8b5524919631-ring-data-devices\") on node \"crc\" DevicePath \"\""
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.974511 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18147166-0270-4289-a33b-8b5524919631-scripts" (OuterVolumeSpecName: "scripts") pod "18147166-0270-4289-a33b-8b5524919631" (UID: "18147166-0270-4289-a33b-8b5524919631"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.974529 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18147166-0270-4289-a33b-8b5524919631-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "18147166-0270-4289-a33b-8b5524919631" (UID: "18147166-0270-4289-a33b-8b5524919631"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.976996 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18147166-0270-4289-a33b-8b5524919631-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "18147166-0270-4289-a33b-8b5524919631" (UID: "18147166-0270-4289-a33b-8b5524919631"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.984304 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18147166-0270-4289-a33b-8b5524919631-kube-api-access-fln9f" (OuterVolumeSpecName: "kube-api-access-fln9f") pod "18147166-0270-4289-a33b-8b5524919631" (UID: "18147166-0270-4289-a33b-8b5524919631"). InnerVolumeSpecName "kube-api-access-fln9f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.984484 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18147166-0270-4289-a33b-8b5524919631-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "18147166-0270-4289-a33b-8b5524919631" (UID: "18147166-0270-4289-a33b-8b5524919631"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.984534 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c535d87-4b67-4e2a-b070-d468468d1f91-kube-api-access-tgj8p" (OuterVolumeSpecName: "kube-api-access-tgj8p") pod "6c535d87-4b67-4e2a-b070-d468468d1f91" (UID: "6c535d87-4b67-4e2a-b070-d468468d1f91"). InnerVolumeSpecName "kube-api-access-tgj8p". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:52:43 crc kubenswrapper[4755]: I0202 22:52:43.984572 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18147166-0270-4289-a33b-8b5524919631-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "18147166-0270-4289-a33b-8b5524919631" (UID: "18147166-0270-4289-a33b-8b5524919631"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.012698 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c535d87-4b67-4e2a-b070-d468468d1f91-config" (OuterVolumeSpecName: "config") pod "6c535d87-4b67-4e2a-b070-d468468d1f91" (UID: "6c535d87-4b67-4e2a-b070-d468468d1f91"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.025650 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-g6jdb" Feb 02 22:52:44 crc kubenswrapper[4755]: E0202 22:52:44.031091 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/6c535d87-4b67-4e2a-b070-d468468d1f91-dns-svc podName:6c535d87-4b67-4e2a-b070-d468468d1f91 nodeName:}" failed. No retries permitted until 2026-02-02 22:52:44.531072044 +0000 UTC m=+1120.222292370 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "dns-svc" (UniqueName: "kubernetes.io/configmap/6c535d87-4b67-4e2a-b070-d468468d1f91-dns-svc") pod "6c535d87-4b67-4e2a-b070-d468468d1f91" (UID: "6c535d87-4b67-4e2a-b070-d468468d1f91") : error deleting /var/lib/kubelet/pods/6c535d87-4b67-4e2a-b070-d468468d1f91/volume-subpaths: remove /var/lib/kubelet/pods/6c535d87-4b67-4e2a-b070-d468468d1f91/volume-subpaths: no such file or directory Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.031450 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c535d87-4b67-4e2a-b070-d468468d1f91-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6c535d87-4b67-4e2a-b070-d468468d1f91" (UID: "6c535d87-4b67-4e2a-b070-d468468d1f91"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.086686 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18147166-0270-4289-a33b-8b5524919631-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.086748 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tgj8p\" (UniqueName: \"kubernetes.io/projected/6c535d87-4b67-4e2a-b070-d468468d1f91-kube-api-access-tgj8p\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.086760 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6c535d87-4b67-4e2a-b070-d468468d1f91-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.086769 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c535d87-4b67-4e2a-b070-d468468d1f91-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.086783 4755 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/18147166-0270-4289-a33b-8b5524919631-swiftconf\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.086803 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/18147166-0270-4289-a33b-8b5524919631-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.086812 4755 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/18147166-0270-4289-a33b-8b5524919631-dispersionconf\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.086820 4755 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/18147166-0270-4289-a33b-8b5524919631-etc-swift\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.086832 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fln9f\" (UniqueName: \"kubernetes.io/projected/18147166-0270-4289-a33b-8b5524919631-kube-api-access-fln9f\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.596030 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c535d87-4b67-4e2a-b070-d468468d1f91-dns-svc\") pod \"6c535d87-4b67-4e2a-b070-d468468d1f91\" (UID: \"6c535d87-4b67-4e2a-b070-d468468d1f91\") " Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.596688 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c535d87-4b67-4e2a-b070-d468468d1f91-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6c535d87-4b67-4e2a-b070-d468468d1f91" (UID: "6c535d87-4b67-4e2a-b070-d468468d1f91"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.597174 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c535d87-4b67-4e2a-b070-d468468d1f91-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.802711 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-etc-swift\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0" Feb 02 22:52:44 crc kubenswrapper[4755]: E0202 22:52:44.802966 4755 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 02 22:52:44 crc kubenswrapper[4755]: E0202 22:52:44.802980 4755 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 02 22:52:44 crc kubenswrapper[4755]: E0202 22:52:44.803031 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-etc-swift podName:50a165a2-aeeb-4f83-9af3-a33f76b34a39 nodeName:}" failed. No retries permitted until 2026-02-02 22:52:46.803016339 +0000 UTC m=+1122.494236665 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-etc-swift") pod "swift-storage-0" (UID: "50a165a2-aeeb-4f83-9af3-a33f76b34a39") : configmap "swift-ring-files" not found Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.827251 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-g6jdb"] Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.840974 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2" event={"ID":"6cf5da5b-b25b-4a10-a05c-a82300fe094b","Type":"ContainerStarted","Data":"919f111e4046619309b3de180d30ff225ab8d08eb07810b870554a01e7753c0e"} Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.841687 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2" Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.844170 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4" event={"ID":"6c535d87-4b67-4e2a-b070-d468468d1f91","Type":"ContainerDied","Data":"6d7e707c84a6ceeb2814c9f1de9d52895493836f96a01e1b938411ae67f63cc3"} Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.844201 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-k28p4" Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.844218 4755 scope.go:117] "RemoveContainer" containerID="43a9f46510c4e0d11f1811cb11679204fa6acdbf45fca842cc1348f9aec43177" Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.861524 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-6v7kn" Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.862319 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"03b1c517-cae2-45bd-a887-fe41ad462721","Type":"ContainerStarted","Data":"13c81f3907572786bb7ca0885973d9fc97e7f01c928a0f06f08404c1f928fef9"} Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.883461 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2" podStartSLOduration=3.883437713 podStartE2EDuration="3.883437713s" podCreationTimestamp="2026-02-02 22:52:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:52:44.880386499 +0000 UTC m=+1120.571606825" watchObservedRunningTime="2026-02-02 22:52:44.883437713 +0000 UTC m=+1120.574658039" Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.900899 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-k28p4"] Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.909515 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-k28p4"] Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.948199 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-6v7kn"] Feb 02 22:52:44 crc kubenswrapper[4755]: I0202 22:52:44.963433 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-6v7kn"] Feb 02 22:52:45 crc kubenswrapper[4755]: I0202 22:52:45.080534 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18147166-0270-4289-a33b-8b5524919631" path="/var/lib/kubelet/pods/18147166-0270-4289-a33b-8b5524919631/volumes" Feb 02 22:52:45 crc kubenswrapper[4755]: I0202 22:52:45.080957 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c535d87-4b67-4e2a-b070-d468468d1f91" path="/var/lib/kubelet/pods/6c535d87-4b67-4e2a-b070-d468468d1f91/volumes" Feb 02 22:52:45 crc kubenswrapper[4755]: W0202 22:52:45.618653 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod980ec437_3885_426d_9b2c_1773951f8c86.slice/crio-d2df0a35675cbd662eee647ae1c2373488f8a55d45376062f5aa81d838010577 WatchSource:0}: Error finding container d2df0a35675cbd662eee647ae1c2373488f8a55d45376062f5aa81d838010577: Status 404 returned error can't find the container with id d2df0a35675cbd662eee647ae1c2373488f8a55d45376062f5aa81d838010577 Feb 02 22:52:45 crc kubenswrapper[4755]: I0202 22:52:45.631837 4755 scope.go:117] "RemoveContainer" containerID="5488e6780372dd487e6897b3cb1f6abd7db9f681097b40a74b8602a24f6204eb" Feb 02 22:52:45 crc kubenswrapper[4755]: I0202 22:52:45.869994 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-g6jdb" event={"ID":"980ec437-3885-426d-9b2c-1773951f8c86","Type":"ContainerStarted","Data":"d2df0a35675cbd662eee647ae1c2373488f8a55d45376062f5aa81d838010577"} Feb 02 22:52:46 crc kubenswrapper[4755]: I0202 22:52:46.844713 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-etc-swift\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0" Feb 02 22:52:46 crc kubenswrapper[4755]: E0202 22:52:46.844837 4755 
Feb 02 22:52:46 crc kubenswrapper[4755]: E0202 22:52:46.845140 4755 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Feb 02 22:52:46 crc kubenswrapper[4755]: E0202 22:52:46.845193 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-etc-swift podName:50a165a2-aeeb-4f83-9af3-a33f76b34a39 nodeName:}" failed. No retries permitted until 2026-02-02 22:52:50.8451776 +0000 UTC m=+1126.536397926 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-etc-swift") pod "swift-storage-0" (UID: "50a165a2-aeeb-4f83-9af3-a33f76b34a39") : configmap "swift-ring-files" not found
Feb 02 22:52:46 crc kubenswrapper[4755]: I0202 22:52:46.881818 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"03b1c517-cae2-45bd-a887-fe41ad462721","Type":"ContainerStarted","Data":"49e08540277d74f1576a7bd72042355a71079b40e3af211e71163bee5feda361"}
Feb 02 22:52:46 crc kubenswrapper[4755]: I0202 22:52:46.882239 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0"
Feb 02 22:52:46 crc kubenswrapper[4755]: I0202 22:52:46.884156 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"9664cb92-62aa-4d52-9936-96c48dc7c8d2","Type":"ContainerStarted","Data":"338cbe9a5335e37b0192e8e4e42ef0bcc34df08baa3ec156ddc2c3b1c018994d"}
Feb 02 22:52:46 crc kubenswrapper[4755]: I0202 22:52:46.908974 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=3.995034892 podStartE2EDuration="5.908956022s" podCreationTimestamp="2026-02-02 22:52:41 +0000 UTC" firstStartedPulling="2026-02-02 22:52:42.398370507 +0000 UTC m=+1118.089590823" lastFinishedPulling="2026-02-02 22:52:44.312291637 +0000 UTC m=+1120.003511953" observedRunningTime="2026-02-02 22:52:46.901897686 +0000 UTC m=+1122.593118012" watchObservedRunningTime="2026-02-02 22:52:46.908956022 +0000 UTC m=+1122.600176348"
Feb 02 22:52:48 crc kubenswrapper[4755]: I0202 22:52:48.609764 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0"
Feb 02 22:52:48 crc kubenswrapper[4755]: I0202 22:52:48.609825 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0"
Feb 02 22:52:48 crc kubenswrapper[4755]: I0202 22:52:48.731688 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0"
Feb 02 22:52:48 crc kubenswrapper[4755]: I0202 22:52:48.798753 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-qrxtc"]
Feb 02 22:52:48 crc kubenswrapper[4755]: E0202 22:52:48.799350 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c535d87-4b67-4e2a-b070-d468468d1f91" containerName="dnsmasq-dns"
Feb 02 22:52:48 crc kubenswrapper[4755]: I0202 22:52:48.799378 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c535d87-4b67-4e2a-b070-d468468d1f91" containerName="dnsmasq-dns"
Feb 02 22:52:48 crc kubenswrapper[4755]: E0202 22:52:48.799406 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c535d87-4b67-4e2a-b070-d468468d1f91" containerName="init"
Feb 02 22:52:48 crc kubenswrapper[4755]: I0202 22:52:48.799418 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c535d87-4b67-4e2a-b070-d468468d1f91" containerName="init"
Feb 02 22:52:48 crc kubenswrapper[4755]: I0202 22:52:48.799778 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c535d87-4b67-4e2a-b070-d468468d1f91" containerName="dnsmasq-dns"
Feb 02 22:52:48 crc kubenswrapper[4755]: I0202 22:52:48.800857 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-qrxtc"
Feb 02 22:52:48 crc kubenswrapper[4755]: I0202 22:52:48.803047 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret"
Feb 02 22:52:48 crc kubenswrapper[4755]: I0202 22:52:48.821984 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-qrxtc"]
Feb 02 22:52:48 crc kubenswrapper[4755]: I0202 22:52:48.885125 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8554648995-zvgvj"
Feb 02 22:52:48 crc kubenswrapper[4755]: I0202 22:52:48.921548 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"9664cb92-62aa-4d52-9936-96c48dc7c8d2","Type":"ContainerStarted","Data":"718e3a62e1a6e92070f8532d90964f6c2d9a1b698bb9e2bc77e78f39b8170053"}
Feb 02 22:52:48 crc kubenswrapper[4755]: I0202 22:52:48.921746 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0"
Feb 02 22:52:48 crc kubenswrapper[4755]: I0202 22:52:48.927237 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0"
Feb 02 22:52:48 crc kubenswrapper[4755]: I0202 22:52:48.948591 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=19.200870682 podStartE2EDuration="46.948571734s" podCreationTimestamp="2026-02-02 22:52:02 +0000 UTC" firstStartedPulling="2026-02-02 22:52:17.993340036 +0000 UTC m=+1093.684560362" lastFinishedPulling="2026-02-02 22:52:45.741041088 +0000 UTC m=+1121.432261414" observedRunningTime="2026-02-02 22:52:48.945055746 +0000 UTC m=+1124.636276082" watchObservedRunningTime="2026-02-02 22:52:48.948571734 +0000 UTC m=+1124.639792060"
Feb 02 22:52:48 crc kubenswrapper[4755]: I0202 22:52:48.988182 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dd5228b4-8e27-416a-8260-1b6d12b439a1-operator-scripts\") pod \"root-account-create-update-qrxtc\" (UID: \"dd5228b4-8e27-416a-8260-1b6d12b439a1\") " pod="openstack/root-account-create-update-qrxtc"
Feb 02 22:52:48 crc kubenswrapper[4755]: I0202 22:52:48.988239 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-px2h2\" (UniqueName: \"kubernetes.io/projected/dd5228b4-8e27-416a-8260-1b6d12b439a1-kube-api-access-px2h2\") pod \"root-account-create-update-qrxtc\" (UID: \"dd5228b4-8e27-416a-8260-1b6d12b439a1\") " pod="openstack/root-account-create-update-qrxtc"
Feb 02 22:52:49 crc kubenswrapper[4755]: I0202 22:52:49.055329 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0"
Feb 02 22:52:49 crc kubenswrapper[4755]: I0202 22:52:49.089526 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dd5228b4-8e27-416a-8260-1b6d12b439a1-operator-scripts\") pod \"root-account-create-update-qrxtc\" (UID: \"dd5228b4-8e27-416a-8260-1b6d12b439a1\") " pod="openstack/root-account-create-update-qrxtc"
Feb 02 22:52:49 crc kubenswrapper[4755]: I0202 22:52:49.089561 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-px2h2\" (UniqueName: \"kubernetes.io/projected/dd5228b4-8e27-416a-8260-1b6d12b439a1-kube-api-access-px2h2\") pod \"root-account-create-update-qrxtc\" (UID: \"dd5228b4-8e27-416a-8260-1b6d12b439a1\") " pod="openstack/root-account-create-update-qrxtc"
Feb 02 22:52:49 crc kubenswrapper[4755]: I0202 22:52:49.091650 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dd5228b4-8e27-416a-8260-1b6d12b439a1-operator-scripts\") pod \"root-account-create-update-qrxtc\" (UID: \"dd5228b4-8e27-416a-8260-1b6d12b439a1\") " pod="openstack/root-account-create-update-qrxtc"
Feb 02 22:52:49 crc kubenswrapper[4755]: I0202 22:52:49.119454 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-px2h2\" (UniqueName: \"kubernetes.io/projected/dd5228b4-8e27-416a-8260-1b6d12b439a1-kube-api-access-px2h2\") pod \"root-account-create-update-qrxtc\" (UID: \"dd5228b4-8e27-416a-8260-1b6d12b439a1\") " pod="openstack/root-account-create-update-qrxtc"
Feb 02 22:52:49 crc kubenswrapper[4755]: I0202 22:52:49.120990 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-qrxtc"
Feb 02 22:52:49 crc kubenswrapper[4755]: I0202 22:52:49.782350 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-g7476"]
Feb 02 22:52:49 crc kubenswrapper[4755]: I0202 22:52:49.784725 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-g7476"
Feb 02 22:52:49 crc kubenswrapper[4755]: I0202 22:52:49.819094 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-g7476"]
Feb 02 22:52:49 crc kubenswrapper[4755]: I0202 22:52:49.904198 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-fc7d-account-create-update-bl74s"]
Feb 02 22:52:49 crc kubenswrapper[4755]: I0202 22:52:49.905919 4755 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/keystone-fc7d-account-create-update-bl74s" Feb 02 22:52:49 crc kubenswrapper[4755]: I0202 22:52:49.907816 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Feb 02 22:52:49 crc kubenswrapper[4755]: I0202 22:52:49.913505 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-fc7d-account-create-update-bl74s"] Feb 02 22:52:49 crc kubenswrapper[4755]: I0202 22:52:49.914858 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ccddf6f-bdaf-40aa-9c6f-56e695369eeb-operator-scripts\") pod \"keystone-db-create-g7476\" (UID: \"9ccddf6f-bdaf-40aa-9c6f-56e695369eeb\") " pod="openstack/keystone-db-create-g7476" Feb 02 22:52:49 crc kubenswrapper[4755]: I0202 22:52:49.914906 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcjql\" (UniqueName: \"kubernetes.io/projected/9ccddf6f-bdaf-40aa-9c6f-56e695369eeb-kube-api-access-rcjql\") pod \"keystone-db-create-g7476\" (UID: \"9ccddf6f-bdaf-40aa-9c6f-56e695369eeb\") " pod="openstack/keystone-db-create-g7476" Feb 02 22:52:49 crc kubenswrapper[4755]: I0202 22:52:49.934299 4755 generic.go:334] "Generic (PLEG): container finished" podID="358a528d-56dd-4737-af2c-750423bbdc56" containerID="031ee13fb5a9e9bc160f26ee98bb344b428a4611b475b3d538d6bc14c4c2261f" exitCode=0 Feb 02 22:52:49 crc kubenswrapper[4755]: I0202 22:52:49.935119 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"358a528d-56dd-4737-af2c-750423bbdc56","Type":"ContainerDied","Data":"031ee13fb5a9e9bc160f26ee98bb344b428a4611b475b3d538d6bc14c4c2261f"} Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.016747 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcjql\" (UniqueName: \"kubernetes.io/projected/9ccddf6f-bdaf-40aa-9c6f-56e695369eeb-kube-api-access-rcjql\") pod \"keystone-db-create-g7476\" (UID: \"9ccddf6f-bdaf-40aa-9c6f-56e695369eeb\") " pod="openstack/keystone-db-create-g7476" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.016836 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc8666d1-433d-4bbe-8079-1675a028f85f-operator-scripts\") pod \"keystone-fc7d-account-create-update-bl74s\" (UID: \"dc8666d1-433d-4bbe-8079-1675a028f85f\") " pod="openstack/keystone-fc7d-account-create-update-bl74s" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.016906 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75kl2\" (UniqueName: \"kubernetes.io/projected/dc8666d1-433d-4bbe-8079-1675a028f85f-kube-api-access-75kl2\") pod \"keystone-fc7d-account-create-update-bl74s\" (UID: \"dc8666d1-433d-4bbe-8079-1675a028f85f\") " pod="openstack/keystone-fc7d-account-create-update-bl74s" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.016967 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ccddf6f-bdaf-40aa-9c6f-56e695369eeb-operator-scripts\") pod \"keystone-db-create-g7476\" (UID: \"9ccddf6f-bdaf-40aa-9c6f-56e695369eeb\") " pod="openstack/keystone-db-create-g7476" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.017654 4755 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ccddf6f-bdaf-40aa-9c6f-56e695369eeb-operator-scripts\") pod \"keystone-db-create-g7476\" (UID: \"9ccddf6f-bdaf-40aa-9c6f-56e695369eeb\") " pod="openstack/keystone-db-create-g7476" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.043992 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcjql\" (UniqueName: \"kubernetes.io/projected/9ccddf6f-bdaf-40aa-9c6f-56e695369eeb-kube-api-access-rcjql\") pod \"keystone-db-create-g7476\" (UID: \"9ccddf6f-bdaf-40aa-9c6f-56e695369eeb\") " pod="openstack/keystone-db-create-g7476" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.098169 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-tpv4r"] Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.099823 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tpv4r" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.116176 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-tpv4r"] Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.118544 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc8666d1-433d-4bbe-8079-1675a028f85f-operator-scripts\") pod \"keystone-fc7d-account-create-update-bl74s\" (UID: \"dc8666d1-433d-4bbe-8079-1675a028f85f\") " pod="openstack/keystone-fc7d-account-create-update-bl74s" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.118705 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75kl2\" (UniqueName: \"kubernetes.io/projected/dc8666d1-433d-4bbe-8079-1675a028f85f-kube-api-access-75kl2\") pod \"keystone-fc7d-account-create-update-bl74s\" (UID: \"dc8666d1-433d-4bbe-8079-1675a028f85f\") " pod="openstack/keystone-fc7d-account-create-update-bl74s" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.124832 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc8666d1-433d-4bbe-8079-1675a028f85f-operator-scripts\") pod \"keystone-fc7d-account-create-update-bl74s\" (UID: \"dc8666d1-433d-4bbe-8079-1675a028f85f\") " pod="openstack/keystone-fc7d-account-create-update-bl74s" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.140554 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75kl2\" (UniqueName: \"kubernetes.io/projected/dc8666d1-433d-4bbe-8079-1675a028f85f-kube-api-access-75kl2\") pod \"keystone-fc7d-account-create-update-bl74s\" (UID: \"dc8666d1-433d-4bbe-8079-1675a028f85f\") " pod="openstack/keystone-fc7d-account-create-update-bl74s" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.147274 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-g7476" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.219350 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-ca50-account-create-update-nzx6m"] Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.220901 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-fc7d-account-create-update-bl74s" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.220973 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-ca50-account-create-update-nzx6m" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.222787 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.227236 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05549d64-5e5c-4833-9180-b902c9563863-operator-scripts\") pod \"placement-db-create-tpv4r\" (UID: \"05549d64-5e5c-4833-9180-b902c9563863\") " pod="openstack/placement-db-create-tpv4r" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.227298 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrx9m\" (UniqueName: \"kubernetes.io/projected/05549d64-5e5c-4833-9180-b902c9563863-kube-api-access-zrx9m\") pod \"placement-db-create-tpv4r\" (UID: \"05549d64-5e5c-4833-9180-b902c9563863\") " pod="openstack/placement-db-create-tpv4r" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.233635 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-ca50-account-create-update-nzx6m"] Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.328538 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8c779f8-e41d-4df2-8b72-4812419f750f-operator-scripts\") pod \"placement-ca50-account-create-update-nzx6m\" (UID: \"f8c779f8-e41d-4df2-8b72-4812419f750f\") " pod="openstack/placement-ca50-account-create-update-nzx6m" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.328605 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrx9m\" (UniqueName: \"kubernetes.io/projected/05549d64-5e5c-4833-9180-b902c9563863-kube-api-access-zrx9m\") pod \"placement-db-create-tpv4r\" (UID: \"05549d64-5e5c-4833-9180-b902c9563863\") " pod="openstack/placement-db-create-tpv4r" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.328634 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flk7q\" (UniqueName: \"kubernetes.io/projected/f8c779f8-e41d-4df2-8b72-4812419f750f-kube-api-access-flk7q\") pod \"placement-ca50-account-create-update-nzx6m\" (UID: \"f8c779f8-e41d-4df2-8b72-4812419f750f\") " pod="openstack/placement-ca50-account-create-update-nzx6m" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.328876 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05549d64-5e5c-4833-9180-b902c9563863-operator-scripts\") pod \"placement-db-create-tpv4r\" (UID: \"05549d64-5e5c-4833-9180-b902c9563863\") " pod="openstack/placement-db-create-tpv4r" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.330607 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05549d64-5e5c-4833-9180-b902c9563863-operator-scripts\") pod \"placement-db-create-tpv4r\" (UID: \"05549d64-5e5c-4833-9180-b902c9563863\") " pod="openstack/placement-db-create-tpv4r" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.347762 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrx9m\" (UniqueName: 
\"kubernetes.io/projected/05549d64-5e5c-4833-9180-b902c9563863-kube-api-access-zrx9m\") pod \"placement-db-create-tpv4r\" (UID: \"05549d64-5e5c-4833-9180-b902c9563863\") " pod="openstack/placement-db-create-tpv4r" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.430400 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8c779f8-e41d-4df2-8b72-4812419f750f-operator-scripts\") pod \"placement-ca50-account-create-update-nzx6m\" (UID: \"f8c779f8-e41d-4df2-8b72-4812419f750f\") " pod="openstack/placement-ca50-account-create-update-nzx6m" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.431131 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8c779f8-e41d-4df2-8b72-4812419f750f-operator-scripts\") pod \"placement-ca50-account-create-update-nzx6m\" (UID: \"f8c779f8-e41d-4df2-8b72-4812419f750f\") " pod="openstack/placement-ca50-account-create-update-nzx6m" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.431218 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flk7q\" (UniqueName: \"kubernetes.io/projected/f8c779f8-e41d-4df2-8b72-4812419f750f-kube-api-access-flk7q\") pod \"placement-ca50-account-create-update-nzx6m\" (UID: \"f8c779f8-e41d-4df2-8b72-4812419f750f\") " pod="openstack/placement-ca50-account-create-update-nzx6m" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.449690 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flk7q\" (UniqueName: \"kubernetes.io/projected/f8c779f8-e41d-4df2-8b72-4812419f750f-kube-api-access-flk7q\") pod \"placement-ca50-account-create-update-nzx6m\" (UID: \"f8c779f8-e41d-4df2-8b72-4812419f750f\") " pod="openstack/placement-ca50-account-create-update-nzx6m" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.451170 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-nbwvc"] Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.453448 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-nbwvc" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.461901 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-nbwvc"] Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.544516 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tpv4r" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.554991 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-ca50-account-create-update-nzx6m" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.613086 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-8f17-account-create-update-z6n62"] Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.614587 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-8f17-account-create-update-z6n62" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.616207 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.621339 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-8f17-account-create-update-z6n62"] Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.635050 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dmns\" (UniqueName: \"kubernetes.io/projected/a1d37e1b-c28c-4fca-823b-3eb4c6e0364b-kube-api-access-4dmns\") pod \"glance-db-create-nbwvc\" (UID: \"a1d37e1b-c28c-4fca-823b-3eb4c6e0364b\") " pod="openstack/glance-db-create-nbwvc" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.635204 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1d37e1b-c28c-4fca-823b-3eb4c6e0364b-operator-scripts\") pod \"glance-db-create-nbwvc\" (UID: \"a1d37e1b-c28c-4fca-823b-3eb4c6e0364b\") " pod="openstack/glance-db-create-nbwvc" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.737341 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a1f428c-a71f-4cfa-8d34-178584bad9a5-operator-scripts\") pod \"glance-8f17-account-create-update-z6n62\" (UID: \"7a1f428c-a71f-4cfa-8d34-178584bad9a5\") " pod="openstack/glance-8f17-account-create-update-z6n62" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.737440 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpmkk\" (UniqueName: \"kubernetes.io/projected/7a1f428c-a71f-4cfa-8d34-178584bad9a5-kube-api-access-kpmkk\") pod \"glance-8f17-account-create-update-z6n62\" (UID: \"7a1f428c-a71f-4cfa-8d34-178584bad9a5\") " pod="openstack/glance-8f17-account-create-update-z6n62" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.737522 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1d37e1b-c28c-4fca-823b-3eb4c6e0364b-operator-scripts\") pod \"glance-db-create-nbwvc\" (UID: \"a1d37e1b-c28c-4fca-823b-3eb4c6e0364b\") " pod="openstack/glance-db-create-nbwvc" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.737591 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dmns\" (UniqueName: \"kubernetes.io/projected/a1d37e1b-c28c-4fca-823b-3eb4c6e0364b-kube-api-access-4dmns\") pod \"glance-db-create-nbwvc\" (UID: \"a1d37e1b-c28c-4fca-823b-3eb4c6e0364b\") " pod="openstack/glance-db-create-nbwvc" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.739118 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1d37e1b-c28c-4fca-823b-3eb4c6e0364b-operator-scripts\") pod \"glance-db-create-nbwvc\" (UID: \"a1d37e1b-c28c-4fca-823b-3eb4c6e0364b\") " pod="openstack/glance-db-create-nbwvc" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.771322 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dmns\" (UniqueName: \"kubernetes.io/projected/a1d37e1b-c28c-4fca-823b-3eb4c6e0364b-kube-api-access-4dmns\") pod \"glance-db-create-nbwvc\" (UID: 
\"a1d37e1b-c28c-4fca-823b-3eb4c6e0364b\") " pod="openstack/glance-db-create-nbwvc" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.806644 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-nbwvc" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.838852 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a1f428c-a71f-4cfa-8d34-178584bad9a5-operator-scripts\") pod \"glance-8f17-account-create-update-z6n62\" (UID: \"7a1f428c-a71f-4cfa-8d34-178584bad9a5\") " pod="openstack/glance-8f17-account-create-update-z6n62" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.838935 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpmkk\" (UniqueName: \"kubernetes.io/projected/7a1f428c-a71f-4cfa-8d34-178584bad9a5-kube-api-access-kpmkk\") pod \"glance-8f17-account-create-update-z6n62\" (UID: \"7a1f428c-a71f-4cfa-8d34-178584bad9a5\") " pod="openstack/glance-8f17-account-create-update-z6n62" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.840159 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a1f428c-a71f-4cfa-8d34-178584bad9a5-operator-scripts\") pod \"glance-8f17-account-create-update-z6n62\" (UID: \"7a1f428c-a71f-4cfa-8d34-178584bad9a5\") " pod="openstack/glance-8f17-account-create-update-z6n62" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.855987 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpmkk\" (UniqueName: \"kubernetes.io/projected/7a1f428c-a71f-4cfa-8d34-178584bad9a5-kube-api-access-kpmkk\") pod \"glance-8f17-account-create-update-z6n62\" (UID: \"7a1f428c-a71f-4cfa-8d34-178584bad9a5\") " pod="openstack/glance-8f17-account-create-update-z6n62" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.932371 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-8f17-account-create-update-z6n62" Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.940832 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-etc-swift\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0" Feb 02 22:52:50 crc kubenswrapper[4755]: E0202 22:52:50.941002 4755 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 02 22:52:50 crc kubenswrapper[4755]: E0202 22:52:50.941023 4755 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 02 22:52:50 crc kubenswrapper[4755]: E0202 22:52:50.941072 4755 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-etc-swift podName:50a165a2-aeeb-4f83-9af3-a33f76b34a39 nodeName:}" failed. No retries permitted until 2026-02-02 22:52:58.941056325 +0000 UTC m=+1134.632276651 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-etc-swift") pod "swift-storage-0" (UID: "50a165a2-aeeb-4f83-9af3-a33f76b34a39") : configmap "swift-ring-files" not found Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.946676 4755 generic.go:334] "Generic (PLEG): container finished" podID="58b4faf6-d651-4094-b0bd-857e9074d9a9" containerID="6fb82bf5a8c394829ac2dcdcea105aa583faa34495f7dfb958a9981f5423ecdc" exitCode=0 Feb 02 22:52:50 crc kubenswrapper[4755]: I0202 22:52:50.947459 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"58b4faf6-d651-4094-b0bd-857e9074d9a9","Type":"ContainerDied","Data":"6fb82bf5a8c394829ac2dcdcea105aa583faa34495f7dfb958a9981f5423ecdc"} Feb 02 22:52:52 crc kubenswrapper[4755]: I0202 22:52:52.133458 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-distributor-66dfd9bb-99jqt" Feb 02 22:52:52 crc kubenswrapper[4755]: I0202 22:52:52.307383 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2" Feb 02 22:52:52 crc kubenswrapper[4755]: I0202 22:52:52.409505 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-qrxtc"] Feb 02 22:52:52 crc kubenswrapper[4755]: I0202 22:52:52.418289 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-ca50-account-create-update-nzx6m"] Feb 02 22:52:52 crc kubenswrapper[4755]: I0202 22:52:52.488550 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-zvgvj"] Feb 02 22:52:52 crc kubenswrapper[4755]: I0202 22:52:52.489037 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8554648995-zvgvj" podUID="f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f" containerName="dnsmasq-dns" containerID="cri-o://f8158f455f87254a92235d23122db7df788cdf0d75d40d9041dbd770736a9adc" gracePeriod=10 Feb 02 22:52:52 crc kubenswrapper[4755]: I0202 22:52:52.523309 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-querier-795fd8f8cc-gc84f" Feb 02 22:52:52 crc kubenswrapper[4755]: I0202 22:52:52.527157 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-query-frontend-5cd44666df-mhst9" Feb 02 22:52:52 crc kubenswrapper[4755]: I0202 22:52:52.528291 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-fc7d-account-create-update-bl74s"] Feb 02 22:52:52 crc kubenswrapper[4755]: I0202 22:52:52.544241 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-g7476"] Feb 02 22:52:52 crc kubenswrapper[4755]: W0202 22:52:52.571399 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ccddf6f_bdaf_40aa_9c6f_56e695369eeb.slice/crio-d15683085594cdbb3b764f3b31cad816700a9a21fb0b0fa2b630ac935566b2b6 WatchSource:0}: Error finding container d15683085594cdbb3b764f3b31cad816700a9a21fb0b0fa2b630ac935566b2b6: Status 404 returned error can't find the container with id d15683085594cdbb3b764f3b31cad816700a9a21fb0b0fa2b630ac935566b2b6 Feb 02 22:52:52 crc kubenswrapper[4755]: I0202 22:52:52.963331 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-8f17-account-create-update-z6n62"] Feb 02 22:52:52 crc 
kubenswrapper[4755]: I0202 22:52:52.981045 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-fc7d-account-create-update-bl74s" event={"ID":"dc8666d1-433d-4bbe-8079-1675a028f85f","Type":"ContainerStarted","Data":"f7250dc730697acf776ffa38ef5bad4ca6f8e665d0ae42a2c04251d2386f75b8"} Feb 02 22:52:52 crc kubenswrapper[4755]: I0202 22:52:52.987917 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"358a528d-56dd-4737-af2c-750423bbdc56","Type":"ContainerStarted","Data":"fd778f826ddd9aa8888475dc8bfa3d0d58355f61057d10f8903274d71add7300"} Feb 02 22:52:52 crc kubenswrapper[4755]: I0202 22:52:52.988183 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:52:52 crc kubenswrapper[4755]: I0202 22:52:52.989391 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-g7476" event={"ID":"9ccddf6f-bdaf-40aa-9c6f-56e695369eeb","Type":"ContainerStarted","Data":"d15683085594cdbb3b764f3b31cad816700a9a21fb0b0fa2b630ac935566b2b6"} Feb 02 22:52:52 crc kubenswrapper[4755]: I0202 22:52:52.990909 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc","Type":"ContainerStarted","Data":"e0dccd2ede44321dc0c2f4d658b5bed9355bae202ba13b3d4afc97e82c8d08fa"} Feb 02 22:52:52 crc kubenswrapper[4755]: W0202 22:52:52.991216 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a1f428c_a71f_4cfa_8d34_178584bad9a5.slice/crio-a3836270e47b4f9172aaf611deb7aa6f900803b96118187b4b5fd0febffd4dc8 WatchSource:0}: Error finding container a3836270e47b4f9172aaf611deb7aa6f900803b96118187b4b5fd0febffd4dc8: Status 404 returned error can't find the container with id a3836270e47b4f9172aaf611deb7aa6f900803b96118187b4b5fd0febffd4dc8 Feb 02 22:52:52 crc kubenswrapper[4755]: I0202 22:52:52.992019 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-ca50-account-create-update-nzx6m" event={"ID":"f8c779f8-e41d-4df2-8b72-4812419f750f","Type":"ContainerStarted","Data":"84dd96693123007b8fccf1af9d014d9e1ccb5f2f484ddddf2f495c60e8b75150"} Feb 02 22:52:53 crc kubenswrapper[4755]: I0202 22:52:53.001866 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"58b4faf6-d651-4094-b0bd-857e9074d9a9","Type":"ContainerStarted","Data":"002b1f763e47804c8b4feb74019e92a1de322df920284ef5d854c53f898d3393"} Feb 02 22:52:53 crc kubenswrapper[4755]: I0202 22:52:53.002352 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Feb 02 22:52:53 crc kubenswrapper[4755]: I0202 22:52:53.007902 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-tpv4r"] Feb 02 22:52:53 crc kubenswrapper[4755]: I0202 22:52:53.009758 4755 generic.go:334] "Generic (PLEG): container finished" podID="f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f" containerID="f8158f455f87254a92235d23122db7df788cdf0d75d40d9041dbd770736a9adc" exitCode=0 Feb 02 22:52:53 crc kubenswrapper[4755]: I0202 22:52:53.009807 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-zvgvj" event={"ID":"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f","Type":"ContainerDied","Data":"f8158f455f87254a92235d23122db7df788cdf0d75d40d9041dbd770736a9adc"} Feb 02 22:52:53 crc kubenswrapper[4755]: I0202 22:52:53.011992 4755 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-g6jdb" event={"ID":"980ec437-3885-426d-9b2c-1773951f8c86","Type":"ContainerStarted","Data":"971a7583051c014a7c546317c12ba54c50832218ae162090475729cfe69c0c5e"} Feb 02 22:52:53 crc kubenswrapper[4755]: I0202 22:52:53.021804 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-qrxtc" event={"ID":"dd5228b4-8e27-416a-8260-1b6d12b439a1","Type":"ContainerStarted","Data":"785feb84941507ba58c4222c78ed482d9d63006ddb9f6c11726b8241dbe714c7"} Feb 02 22:52:53 crc kubenswrapper[4755]: I0202 22:52:53.023886 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-nbwvc"] Feb 02 22:52:53 crc kubenswrapper[4755]: I0202 22:52:53.030988 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=55.852263679 podStartE2EDuration="57.030975364s" podCreationTimestamp="2026-02-02 22:51:56 +0000 UTC" firstStartedPulling="2026-02-02 22:52:15.483167651 +0000 UTC m=+1091.174388017" lastFinishedPulling="2026-02-02 22:52:16.661879386 +0000 UTC m=+1092.353099702" observedRunningTime="2026-02-02 22:52:53.025238115 +0000 UTC m=+1128.716458451" watchObservedRunningTime="2026-02-02 22:52:53.030975364 +0000 UTC m=+1128.722195690" Feb 02 22:52:53 crc kubenswrapper[4755]: I0202 22:52:53.053366 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=45.614066444 podStartE2EDuration="58.053350045s" podCreationTimestamp="2026-02-02 22:51:55 +0000 UTC" firstStartedPulling="2026-02-02 22:52:04.021705163 +0000 UTC m=+1079.712925489" lastFinishedPulling="2026-02-02 22:52:16.460988764 +0000 UTC m=+1092.152209090" observedRunningTime="2026-02-02 22:52:53.052489822 +0000 UTC m=+1128.743710158" watchObservedRunningTime="2026-02-02 22:52:53.053350045 +0000 UTC m=+1128.744570371" Feb 02 22:52:53 crc kubenswrapper[4755]: I0202 22:52:53.081033 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-g6jdb" podStartSLOduration=3.8450943300000002 podStartE2EDuration="10.081018034s" podCreationTimestamp="2026-02-02 22:52:43 +0000 UTC" firstStartedPulling="2026-02-02 22:52:45.63202787 +0000 UTC m=+1121.323248196" lastFinishedPulling="2026-02-02 22:52:51.867951574 +0000 UTC m=+1127.559171900" observedRunningTime="2026-02-02 22:52:53.078191276 +0000 UTC m=+1128.769411602" watchObservedRunningTime="2026-02-02 22:52:53.081018034 +0000 UTC m=+1128.772238360" Feb 02 22:52:53 crc kubenswrapper[4755]: I0202 22:52:53.386112 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="5717cdb5-a227-4975-b808-068f0ace63c5" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Feb 02 22:52:53 crc kubenswrapper[4755]: I0202 22:52:53.388863 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 22:52:53 crc kubenswrapper[4755]: I0202 22:52:53.388920 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 22:52:53 crc kubenswrapper[4755]: I0202 22:52:53.484343 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-compactor-0" Feb 02 22:52:53 crc kubenswrapper[4755]: I0202 22:52:53.699702 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-index-gateway-0" Feb 02 22:52:53 crc kubenswrapper[4755]: I0202 22:52:53.883574 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8554648995-zvgvj" podUID="f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.129:5353: connect: connection refused" Feb 02 22:52:54 crc kubenswrapper[4755]: I0202 22:52:54.029784 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-8f17-account-create-update-z6n62" event={"ID":"7a1f428c-a71f-4cfa-8d34-178584bad9a5","Type":"ContainerStarted","Data":"a3836270e47b4f9172aaf611deb7aa6f900803b96118187b4b5fd0febffd4dc8"} Feb 02 22:52:54 crc kubenswrapper[4755]: I0202 22:52:54.031074 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-nbwvc" event={"ID":"a1d37e1b-c28c-4fca-823b-3eb4c6e0364b","Type":"ContainerStarted","Data":"9d55961535b2cbce9ed7bf334ef8fa7068cc32740bd1479a0858bf5409c6b38f"} Feb 02 22:52:54 crc kubenswrapper[4755]: I0202 22:52:54.033278 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-tpv4r" event={"ID":"05549d64-5e5c-4833-9180-b902c9563863","Type":"ContainerStarted","Data":"2c9cdfb4a8021d737c7e0cbd02703bd659c990daaa6d8f33c196da61e555b8b1"} Feb 02 22:52:54 crc kubenswrapper[4755]: I0202 22:52:54.566574 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-zvgvj" Feb 02 22:52:54 crc kubenswrapper[4755]: I0202 22:52:54.674218 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-dns-svc\") pod \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\" (UID: \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\") " Feb 02 22:52:54 crc kubenswrapper[4755]: I0202 22:52:54.674320 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-ovsdbserver-nb\") pod \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\" (UID: \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\") " Feb 02 22:52:54 crc kubenswrapper[4755]: I0202 22:52:54.674375 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-config\") pod \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\" (UID: \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\") " Feb 02 22:52:54 crc kubenswrapper[4755]: I0202 22:52:54.674406 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mpmm\" (UniqueName: \"kubernetes.io/projected/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-kube-api-access-9mpmm\") pod \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\" (UID: \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\") " Feb 02 22:52:54 crc kubenswrapper[4755]: I0202 22:52:54.674438 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-ovsdbserver-sb\") pod \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\" (UID: \"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f\") " Feb 02 22:52:54 crc kubenswrapper[4755]: I0202 22:52:54.680943 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-kube-api-access-9mpmm" (OuterVolumeSpecName: "kube-api-access-9mpmm") pod "f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f" (UID: "f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f"). InnerVolumeSpecName "kube-api-access-9mpmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:52:54 crc kubenswrapper[4755]: I0202 22:52:54.730839 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f" (UID: "f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:52:54 crc kubenswrapper[4755]: I0202 22:52:54.732422 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-config" (OuterVolumeSpecName: "config") pod "f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f" (UID: "f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:52:54 crc kubenswrapper[4755]: I0202 22:52:54.749451 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f" (UID: "f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:52:54 crc kubenswrapper[4755]: I0202 22:52:54.777015 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:54 crc kubenswrapper[4755]: I0202 22:52:54.777072 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:54 crc kubenswrapper[4755]: I0202 22:52:54.777087 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:54 crc kubenswrapper[4755]: I0202 22:52:54.777099 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mpmm\" (UniqueName: \"kubernetes.io/projected/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-kube-api-access-9mpmm\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:54 crc kubenswrapper[4755]: I0202 22:52:54.778381 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f" (UID: "f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:52:54 crc kubenswrapper[4755]: I0202 22:52:54.878769 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:55 crc kubenswrapper[4755]: I0202 22:52:55.042312 4755 generic.go:334] "Generic (PLEG): container finished" podID="05549d64-5e5c-4833-9180-b902c9563863" containerID="f3722ac2476d144fef57641d38b14bc0b5d3dce1d30fc099de58866f69d9de04" exitCode=0 Feb 02 22:52:55 crc kubenswrapper[4755]: I0202 22:52:55.042384 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-tpv4r" event={"ID":"05549d64-5e5c-4833-9180-b902c9563863","Type":"ContainerDied","Data":"f3722ac2476d144fef57641d38b14bc0b5d3dce1d30fc099de58866f69d9de04"} Feb 02 22:52:55 crc kubenswrapper[4755]: I0202 22:52:55.044464 4755 generic.go:334] "Generic (PLEG): container finished" podID="dc8666d1-433d-4bbe-8079-1675a028f85f" containerID="aa24f1c132f4b4bc8ab3d80d16774742b8a0e584c58c86c73b7c723474851cec" exitCode=0 Feb 02 22:52:55 crc kubenswrapper[4755]: I0202 22:52:55.044528 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-fc7d-account-create-update-bl74s" event={"ID":"dc8666d1-433d-4bbe-8079-1675a028f85f","Type":"ContainerDied","Data":"aa24f1c132f4b4bc8ab3d80d16774742b8a0e584c58c86c73b7c723474851cec"} Feb 02 22:52:55 crc kubenswrapper[4755]: I0202 22:52:55.047468 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc","Type":"ContainerStarted","Data":"4184e09c3258912403f80e83b518732aa855d8b2b2381ba02eb5a280b06c9b83"} Feb 02 22:52:55 crc kubenswrapper[4755]: I0202 22:52:55.048824 4755 generic.go:334] "Generic (PLEG): container finished" podID="dd5228b4-8e27-416a-8260-1b6d12b439a1" containerID="441f708504e129ad598c3c3fe1dc01da921165d12d066f8b0330b996f14943cf" exitCode=0 Feb 02 22:52:55 
crc kubenswrapper[4755]: I0202 22:52:55.048877 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-qrxtc" event={"ID":"dd5228b4-8e27-416a-8260-1b6d12b439a1","Type":"ContainerDied","Data":"441f708504e129ad598c3c3fe1dc01da921165d12d066f8b0330b996f14943cf"} Feb 02 22:52:55 crc kubenswrapper[4755]: I0202 22:52:55.050182 4755 generic.go:334] "Generic (PLEG): container finished" podID="a1d37e1b-c28c-4fca-823b-3eb4c6e0364b" containerID="46464d7a60b85fdd8bb5b3a1463a61e2229173cfc9b335e80ed44fba4bf0fe9a" exitCode=0 Feb 02 22:52:55 crc kubenswrapper[4755]: I0202 22:52:55.050237 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-nbwvc" event={"ID":"a1d37e1b-c28c-4fca-823b-3eb4c6e0364b","Type":"ContainerDied","Data":"46464d7a60b85fdd8bb5b3a1463a61e2229173cfc9b335e80ed44fba4bf0fe9a"} Feb 02 22:52:55 crc kubenswrapper[4755]: I0202 22:52:55.052314 4755 generic.go:334] "Generic (PLEG): container finished" podID="9ccddf6f-bdaf-40aa-9c6f-56e695369eeb" containerID="95eba6d72b8aad6257655a0b6dd07c1999278fc7c9cb73c238e2e9b95312bc4b" exitCode=0 Feb 02 22:52:55 crc kubenswrapper[4755]: I0202 22:52:55.052419 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-g7476" event={"ID":"9ccddf6f-bdaf-40aa-9c6f-56e695369eeb","Type":"ContainerDied","Data":"95eba6d72b8aad6257655a0b6dd07c1999278fc7c9cb73c238e2e9b95312bc4b"} Feb 02 22:52:55 crc kubenswrapper[4755]: I0202 22:52:55.054825 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-zvgvj" event={"ID":"f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f","Type":"ContainerDied","Data":"08eb59f52b271bf040121e371d297dcc3f907647592dcb1f90a6f6717d686f52"} Feb 02 22:52:55 crc kubenswrapper[4755]: I0202 22:52:55.054864 4755 scope.go:117] "RemoveContainer" containerID="f8158f455f87254a92235d23122db7df788cdf0d75d40d9041dbd770736a9adc" Feb 02 22:52:55 crc kubenswrapper[4755]: I0202 22:52:55.054984 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-zvgvj" Feb 02 22:52:55 crc kubenswrapper[4755]: I0202 22:52:55.058863 4755 generic.go:334] "Generic (PLEG): container finished" podID="7a1f428c-a71f-4cfa-8d34-178584bad9a5" containerID="550835e02b06a1a6922975bd0a6914f8e27024c00d7d4135fc0c475194741957" exitCode=0 Feb 02 22:52:55 crc kubenswrapper[4755]: I0202 22:52:55.058890 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-8f17-account-create-update-z6n62" event={"ID":"7a1f428c-a71f-4cfa-8d34-178584bad9a5","Type":"ContainerDied","Data":"550835e02b06a1a6922975bd0a6914f8e27024c00d7d4135fc0c475194741957"} Feb 02 22:52:55 crc kubenswrapper[4755]: I0202 22:52:55.063886 4755 generic.go:334] "Generic (PLEG): container finished" podID="f8c779f8-e41d-4df2-8b72-4812419f750f" containerID="8a749ceb46f50e0da84049c41f6b4dfbeb9cb0a3530cdd09681ded512311ebb3" exitCode=0 Feb 02 22:52:55 crc kubenswrapper[4755]: I0202 22:52:55.063957 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-ca50-account-create-update-nzx6m" event={"ID":"f8c779f8-e41d-4df2-8b72-4812419f750f","Type":"ContainerDied","Data":"8a749ceb46f50e0da84049c41f6b4dfbeb9cb0a3530cdd09681ded512311ebb3"} Feb 02 22:52:55 crc kubenswrapper[4755]: I0202 22:52:55.186751 4755 scope.go:117] "RemoveContainer" containerID="4deb7cbccc97d4bdc9eb1f5fabeafc7c527958dd1ca8ae1b32aa24c3184a3da5" Feb 02 22:52:55 crc kubenswrapper[4755]: I0202 22:52:55.226504 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-zvgvj"] Feb 02 22:52:55 crc kubenswrapper[4755]: I0202 22:52:55.235148 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554648995-zvgvj"] Feb 02 22:52:56 crc kubenswrapper[4755]: I0202 22:52:56.518841 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-qrxtc" Feb 02 22:52:56 crc kubenswrapper[4755]: I0202 22:52:56.608791 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-px2h2\" (UniqueName: \"kubernetes.io/projected/dd5228b4-8e27-416a-8260-1b6d12b439a1-kube-api-access-px2h2\") pod \"dd5228b4-8e27-416a-8260-1b6d12b439a1\" (UID: \"dd5228b4-8e27-416a-8260-1b6d12b439a1\") " Feb 02 22:52:56 crc kubenswrapper[4755]: I0202 22:52:56.608923 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dd5228b4-8e27-416a-8260-1b6d12b439a1-operator-scripts\") pod \"dd5228b4-8e27-416a-8260-1b6d12b439a1\" (UID: \"dd5228b4-8e27-416a-8260-1b6d12b439a1\") " Feb 02 22:52:56 crc kubenswrapper[4755]: I0202 22:52:56.609917 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd5228b4-8e27-416a-8260-1b6d12b439a1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "dd5228b4-8e27-416a-8260-1b6d12b439a1" (UID: "dd5228b4-8e27-416a-8260-1b6d12b439a1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:52:56 crc kubenswrapper[4755]: I0202 22:52:56.615689 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd5228b4-8e27-416a-8260-1b6d12b439a1-kube-api-access-px2h2" (OuterVolumeSpecName: "kube-api-access-px2h2") pod "dd5228b4-8e27-416a-8260-1b6d12b439a1" (UID: "dd5228b4-8e27-416a-8260-1b6d12b439a1"). InnerVolumeSpecName "kube-api-access-px2h2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:52:56 crc kubenswrapper[4755]: I0202 22:52:56.710679 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dd5228b4-8e27-416a-8260-1b6d12b439a1-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:56 crc kubenswrapper[4755]: I0202 22:52:56.710719 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-px2h2\" (UniqueName: \"kubernetes.io/projected/dd5228b4-8e27-416a-8260-1b6d12b439a1-kube-api-access-px2h2\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.081747 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f" path="/var/lib/kubelet/pods/f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f/volumes" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.084406 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-qrxtc" event={"ID":"dd5228b4-8e27-416a-8260-1b6d12b439a1","Type":"ContainerDied","Data":"785feb84941507ba58c4222c78ed482d9d63006ddb9f6c11726b8241dbe714c7"} Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.084446 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="785feb84941507ba58c4222c78ed482d9d63006ddb9f6c11726b8241dbe714c7" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.084503 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-qrxtc" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.600584 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-fc7d-account-create-update-bl74s" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.606567 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-g7476" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.613355 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tpv4r" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.618033 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-ca50-account-create-update-nzx6m" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.647582 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-8f17-account-create-update-z6n62" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.657948 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-nbwvc" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.732531 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrx9m\" (UniqueName: \"kubernetes.io/projected/05549d64-5e5c-4833-9180-b902c9563863-kube-api-access-zrx9m\") pod \"05549d64-5e5c-4833-9180-b902c9563863\" (UID: \"05549d64-5e5c-4833-9180-b902c9563863\") " Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.732644 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ccddf6f-bdaf-40aa-9c6f-56e695369eeb-operator-scripts\") pod \"9ccddf6f-bdaf-40aa-9c6f-56e695369eeb\" (UID: \"9ccddf6f-bdaf-40aa-9c6f-56e695369eeb\") " Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.732711 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05549d64-5e5c-4833-9180-b902c9563863-operator-scripts\") pod \"05549d64-5e5c-4833-9180-b902c9563863\" (UID: \"05549d64-5e5c-4833-9180-b902c9563863\") " Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.732761 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flk7q\" (UniqueName: \"kubernetes.io/projected/f8c779f8-e41d-4df2-8b72-4812419f750f-kube-api-access-flk7q\") pod \"f8c779f8-e41d-4df2-8b72-4812419f750f\" (UID: \"f8c779f8-e41d-4df2-8b72-4812419f750f\") " Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.732847 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kpmkk\" (UniqueName: \"kubernetes.io/projected/7a1f428c-a71f-4cfa-8d34-178584bad9a5-kube-api-access-kpmkk\") pod \"7a1f428c-a71f-4cfa-8d34-178584bad9a5\" (UID: \"7a1f428c-a71f-4cfa-8d34-178584bad9a5\") " Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.732891 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rcjql\" (UniqueName: \"kubernetes.io/projected/9ccddf6f-bdaf-40aa-9c6f-56e695369eeb-kube-api-access-rcjql\") pod \"9ccddf6f-bdaf-40aa-9c6f-56e695369eeb\" (UID: \"9ccddf6f-bdaf-40aa-9c6f-56e695369eeb\") " Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.732914 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8c779f8-e41d-4df2-8b72-4812419f750f-operator-scripts\") pod \"f8c779f8-e41d-4df2-8b72-4812419f750f\" (UID: \"f8c779f8-e41d-4df2-8b72-4812419f750f\") " Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.732952 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a1f428c-a71f-4cfa-8d34-178584bad9a5-operator-scripts\") pod \"7a1f428c-a71f-4cfa-8d34-178584bad9a5\" (UID: \"7a1f428c-a71f-4cfa-8d34-178584bad9a5\") " Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.733007 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc8666d1-433d-4bbe-8079-1675a028f85f-operator-scripts\") pod \"dc8666d1-433d-4bbe-8079-1675a028f85f\" (UID: \"dc8666d1-433d-4bbe-8079-1675a028f85f\") " Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.733051 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75kl2\" (UniqueName: 
\"kubernetes.io/projected/dc8666d1-433d-4bbe-8079-1675a028f85f-kube-api-access-75kl2\") pod \"dc8666d1-433d-4bbe-8079-1675a028f85f\" (UID: \"dc8666d1-433d-4bbe-8079-1675a028f85f\") " Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.733514 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ccddf6f-bdaf-40aa-9c6f-56e695369eeb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9ccddf6f-bdaf-40aa-9c6f-56e695369eeb" (UID: "9ccddf6f-bdaf-40aa-9c6f-56e695369eeb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.733652 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8c779f8-e41d-4df2-8b72-4812419f750f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f8c779f8-e41d-4df2-8b72-4812419f750f" (UID: "f8c779f8-e41d-4df2-8b72-4812419f750f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.733787 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a1f428c-a71f-4cfa-8d34-178584bad9a5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7a1f428c-a71f-4cfa-8d34-178584bad9a5" (UID: "7a1f428c-a71f-4cfa-8d34-178584bad9a5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.733820 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc8666d1-433d-4bbe-8079-1675a028f85f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "dc8666d1-433d-4bbe-8079-1675a028f85f" (UID: "dc8666d1-433d-4bbe-8079-1675a028f85f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.734278 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05549d64-5e5c-4833-9180-b902c9563863-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "05549d64-5e5c-4833-9180-b902c9563863" (UID: "05549d64-5e5c-4833-9180-b902c9563863"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.734478 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc8666d1-433d-4bbe-8079-1675a028f85f-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.734494 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ccddf6f-bdaf-40aa-9c6f-56e695369eeb-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.734504 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05549d64-5e5c-4833-9180-b902c9563863-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.734513 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8c779f8-e41d-4df2-8b72-4812419f750f-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.734521 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a1f428c-a71f-4cfa-8d34-178584bad9a5-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.739061 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ccddf6f-bdaf-40aa-9c6f-56e695369eeb-kube-api-access-rcjql" (OuterVolumeSpecName: "kube-api-access-rcjql") pod "9ccddf6f-bdaf-40aa-9c6f-56e695369eeb" (UID: "9ccddf6f-bdaf-40aa-9c6f-56e695369eeb"). InnerVolumeSpecName "kube-api-access-rcjql". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.739676 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8c779f8-e41d-4df2-8b72-4812419f750f-kube-api-access-flk7q" (OuterVolumeSpecName: "kube-api-access-flk7q") pod "f8c779f8-e41d-4df2-8b72-4812419f750f" (UID: "f8c779f8-e41d-4df2-8b72-4812419f750f"). InnerVolumeSpecName "kube-api-access-flk7q". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.747955 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a1f428c-a71f-4cfa-8d34-178584bad9a5-kube-api-access-kpmkk" (OuterVolumeSpecName: "kube-api-access-kpmkk") pod "7a1f428c-a71f-4cfa-8d34-178584bad9a5" (UID: "7a1f428c-a71f-4cfa-8d34-178584bad9a5"). InnerVolumeSpecName "kube-api-access-kpmkk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.748054 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05549d64-5e5c-4833-9180-b902c9563863-kube-api-access-zrx9m" (OuterVolumeSpecName: "kube-api-access-zrx9m") pod "05549d64-5e5c-4833-9180-b902c9563863" (UID: "05549d64-5e5c-4833-9180-b902c9563863"). InnerVolumeSpecName "kube-api-access-zrx9m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.748204 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc8666d1-433d-4bbe-8079-1675a028f85f-kube-api-access-75kl2" (OuterVolumeSpecName: "kube-api-access-75kl2") pod "dc8666d1-433d-4bbe-8079-1675a028f85f" (UID: "dc8666d1-433d-4bbe-8079-1675a028f85f"). InnerVolumeSpecName "kube-api-access-75kl2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.835295 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1d37e1b-c28c-4fca-823b-3eb4c6e0364b-operator-scripts\") pod \"a1d37e1b-c28c-4fca-823b-3eb4c6e0364b\" (UID: \"a1d37e1b-c28c-4fca-823b-3eb4c6e0364b\") " Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.835505 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4dmns\" (UniqueName: \"kubernetes.io/projected/a1d37e1b-c28c-4fca-823b-3eb4c6e0364b-kube-api-access-4dmns\") pod \"a1d37e1b-c28c-4fca-823b-3eb4c6e0364b\" (UID: \"a1d37e1b-c28c-4fca-823b-3eb4c6e0364b\") " Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.835915 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75kl2\" (UniqueName: \"kubernetes.io/projected/dc8666d1-433d-4bbe-8079-1675a028f85f-kube-api-access-75kl2\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.835933 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrx9m\" (UniqueName: \"kubernetes.io/projected/05549d64-5e5c-4833-9180-b902c9563863-kube-api-access-zrx9m\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.835943 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flk7q\" (UniqueName: \"kubernetes.io/projected/f8c779f8-e41d-4df2-8b72-4812419f750f-kube-api-access-flk7q\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.835953 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kpmkk\" (UniqueName: \"kubernetes.io/projected/7a1f428c-a71f-4cfa-8d34-178584bad9a5-kube-api-access-kpmkk\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.835962 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rcjql\" (UniqueName: \"kubernetes.io/projected/9ccddf6f-bdaf-40aa-9c6f-56e695369eeb-kube-api-access-rcjql\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.836833 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1d37e1b-c28c-4fca-823b-3eb4c6e0364b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a1d37e1b-c28c-4fca-823b-3eb4c6e0364b" (UID: "a1d37e1b-c28c-4fca-823b-3eb4c6e0364b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.838949 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1d37e1b-c28c-4fca-823b-3eb4c6e0364b-kube-api-access-4dmns" (OuterVolumeSpecName: "kube-api-access-4dmns") pod "a1d37e1b-c28c-4fca-823b-3eb4c6e0364b" (UID: "a1d37e1b-c28c-4fca-823b-3eb4c6e0364b"). InnerVolumeSpecName "kube-api-access-4dmns". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.937612 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4dmns\" (UniqueName: \"kubernetes.io/projected/a1d37e1b-c28c-4fca-823b-3eb4c6e0364b-kube-api-access-4dmns\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:57 crc kubenswrapper[4755]: I0202 22:52:57.937643 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1d37e1b-c28c-4fca-823b-3eb4c6e0364b-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:52:58 crc kubenswrapper[4755]: I0202 22:52:58.096229 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-8f17-account-create-update-z6n62" Feb 02 22:52:58 crc kubenswrapper[4755]: I0202 22:52:58.097168 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-8f17-account-create-update-z6n62" event={"ID":"7a1f428c-a71f-4cfa-8d34-178584bad9a5","Type":"ContainerDied","Data":"a3836270e47b4f9172aaf611deb7aa6f900803b96118187b4b5fd0febffd4dc8"} Feb 02 22:52:58 crc kubenswrapper[4755]: I0202 22:52:58.097207 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a3836270e47b4f9172aaf611deb7aa6f900803b96118187b4b5fd0febffd4dc8" Feb 02 22:52:58 crc kubenswrapper[4755]: I0202 22:52:58.100096 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-ca50-account-create-update-nzx6m" event={"ID":"f8c779f8-e41d-4df2-8b72-4812419f750f","Type":"ContainerDied","Data":"84dd96693123007b8fccf1af9d014d9e1ccb5f2f484ddddf2f495c60e8b75150"} Feb 02 22:52:58 crc kubenswrapper[4755]: I0202 22:52:58.100131 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84dd96693123007b8fccf1af9d014d9e1ccb5f2f484ddddf2f495c60e8b75150" Feb 02 22:52:58 crc kubenswrapper[4755]: I0202 22:52:58.100222 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-ca50-account-create-update-nzx6m" Feb 02 22:52:58 crc kubenswrapper[4755]: I0202 22:52:58.102986 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-nbwvc" Feb 02 22:52:58 crc kubenswrapper[4755]: I0202 22:52:58.103028 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-nbwvc" event={"ID":"a1d37e1b-c28c-4fca-823b-3eb4c6e0364b","Type":"ContainerDied","Data":"9d55961535b2cbce9ed7bf334ef8fa7068cc32740bd1479a0858bf5409c6b38f"} Feb 02 22:52:58 crc kubenswrapper[4755]: I0202 22:52:58.103073 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9d55961535b2cbce9ed7bf334ef8fa7068cc32740bd1479a0858bf5409c6b38f" Feb 02 22:52:58 crc kubenswrapper[4755]: I0202 22:52:58.104922 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-tpv4r" event={"ID":"05549d64-5e5c-4833-9180-b902c9563863","Type":"ContainerDied","Data":"2c9cdfb4a8021d737c7e0cbd02703bd659c990daaa6d8f33c196da61e555b8b1"} Feb 02 22:52:58 crc kubenswrapper[4755]: I0202 22:52:58.104970 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c9cdfb4a8021d737c7e0cbd02703bd659c990daaa6d8f33c196da61e555b8b1" Feb 02 22:52:58 crc kubenswrapper[4755]: I0202 22:52:58.105047 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-tpv4r" Feb 02 22:52:58 crc kubenswrapper[4755]: I0202 22:52:58.114868 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-fc7d-account-create-update-bl74s" event={"ID":"dc8666d1-433d-4bbe-8079-1675a028f85f","Type":"ContainerDied","Data":"f7250dc730697acf776ffa38ef5bad4ca6f8e665d0ae42a2c04251d2386f75b8"} Feb 02 22:52:58 crc kubenswrapper[4755]: I0202 22:52:58.114923 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f7250dc730697acf776ffa38ef5bad4ca6f8e665d0ae42a2c04251d2386f75b8" Feb 02 22:52:58 crc kubenswrapper[4755]: I0202 22:52:58.115007 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-fc7d-account-create-update-bl74s" Feb 02 22:52:58 crc kubenswrapper[4755]: I0202 22:52:58.118169 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-g7476" event={"ID":"9ccddf6f-bdaf-40aa-9c6f-56e695369eeb","Type":"ContainerDied","Data":"d15683085594cdbb3b764f3b31cad816700a9a21fb0b0fa2b630ac935566b2b6"} Feb 02 22:52:58 crc kubenswrapper[4755]: I0202 22:52:58.118206 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d15683085594cdbb3b764f3b31cad816700a9a21fb0b0fa2b630ac935566b2b6" Feb 02 22:52:58 crc kubenswrapper[4755]: I0202 22:52:58.118263 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-g7476" Feb 02 22:52:58 crc kubenswrapper[4755]: I0202 22:52:58.121754 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc","Type":"ContainerStarted","Data":"2dba2f60b197315b9c9aeecc54c2dfba32b69489d7b7279575c7b88354eafc52"} Feb 02 22:52:58 crc kubenswrapper[4755]: I0202 22:52:58.143639 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Feb 02 22:52:58 crc kubenswrapper[4755]: I0202 22:52:58.162115 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=16.599940777 podStartE2EDuration="57.162101927s" podCreationTimestamp="2026-02-02 22:52:01 +0000 UTC" firstStartedPulling="2026-02-02 22:52:16.934445789 +0000 UTC m=+1092.625666115" lastFinishedPulling="2026-02-02 22:52:57.496606939 +0000 UTC m=+1133.187827265" observedRunningTime="2026-02-02 22:52:58.149188068 +0000 UTC m=+1133.840408434" watchObservedRunningTime="2026-02-02 22:52:58.162101927 +0000 UTC m=+1133.853322253" Feb 02 22:52:58 crc kubenswrapper[4755]: I0202 22:52:58.955834 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-etc-swift\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0" Feb 02 22:52:58 crc kubenswrapper[4755]: E0202 22:52:58.956007 4755 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 02 22:52:58 crc kubenswrapper[4755]: E0202 22:52:58.956021 4755 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 02 22:52:58 crc kubenswrapper[4755]: E0202 22:52:58.956061 4755 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-etc-swift podName:50a165a2-aeeb-4f83-9af3-a33f76b34a39 nodeName:}" failed. No retries permitted until 2026-02-02 22:53:14.956047413 +0000 UTC m=+1150.647267739 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-etc-swift") pod "swift-storage-0" (UID: "50a165a2-aeeb-4f83-9af3-a33f76b34a39") : configmap "swift-ring-files" not found Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.143815 4755 generic.go:334] "Generic (PLEG): container finished" podID="980ec437-3885-426d-9b2c-1773951f8c86" containerID="971a7583051c014a7c546317c12ba54c50832218ae162090475729cfe69c0c5e" exitCode=0 Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.145186 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-g6jdb" event={"ID":"980ec437-3885-426d-9b2c-1773951f8c86","Type":"ContainerDied","Data":"971a7583051c014a7c546317c12ba54c50832218ae162090475729cfe69c0c5e"} Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.751701 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-fdxn5"] Feb 02 22:53:00 crc kubenswrapper[4755]: E0202 22:53:00.752372 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc8666d1-433d-4bbe-8079-1675a028f85f" containerName="mariadb-account-create-update" Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.752392 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc8666d1-433d-4bbe-8079-1675a028f85f" containerName="mariadb-account-create-update" Feb 02 22:53:00 crc kubenswrapper[4755]: E0202 22:53:00.752409 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a1f428c-a71f-4cfa-8d34-178584bad9a5" containerName="mariadb-account-create-update" Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.752418 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a1f428c-a71f-4cfa-8d34-178584bad9a5" containerName="mariadb-account-create-update" Feb 02 22:53:00 crc kubenswrapper[4755]: E0202 22:53:00.752438 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd5228b4-8e27-416a-8260-1b6d12b439a1" containerName="mariadb-account-create-update" Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.752448 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd5228b4-8e27-416a-8260-1b6d12b439a1" containerName="mariadb-account-create-update" Feb 02 22:53:00 crc kubenswrapper[4755]: E0202 22:53:00.752462 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f" containerName="dnsmasq-dns" Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.752470 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f" containerName="dnsmasq-dns" Feb 02 22:53:00 crc kubenswrapper[4755]: E0202 22:53:00.752488 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ccddf6f-bdaf-40aa-9c6f-56e695369eeb" containerName="mariadb-database-create" Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.752496 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ccddf6f-bdaf-40aa-9c6f-56e695369eeb" containerName="mariadb-database-create" Feb 02 22:53:00 crc kubenswrapper[4755]: E0202 22:53:00.752508 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f" containerName="init" Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 
Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.752516 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f" containerName="init"
Feb 02 22:53:00 crc kubenswrapper[4755]: E0202 22:53:00.752534 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1d37e1b-c28c-4fca-823b-3eb4c6e0364b" containerName="mariadb-database-create"
Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.752544 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1d37e1b-c28c-4fca-823b-3eb4c6e0364b" containerName="mariadb-database-create"
Feb 02 22:53:00 crc kubenswrapper[4755]: E0202 22:53:00.752560 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05549d64-5e5c-4833-9180-b902c9563863" containerName="mariadb-database-create"
Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.752571 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="05549d64-5e5c-4833-9180-b902c9563863" containerName="mariadb-database-create"
Feb 02 22:53:00 crc kubenswrapper[4755]: E0202 22:53:00.752595 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8c779f8-e41d-4df2-8b72-4812419f750f" containerName="mariadb-account-create-update"
Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.752606 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8c779f8-e41d-4df2-8b72-4812419f750f" containerName="mariadb-account-create-update"
Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.752861 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4417ea9-ccc9-4ecf-9fe8-9e6e1c2ba72f" containerName="dnsmasq-dns"
Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.752884 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc8666d1-433d-4bbe-8079-1675a028f85f" containerName="mariadb-account-create-update"
Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.752897 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8c779f8-e41d-4df2-8b72-4812419f750f" containerName="mariadb-account-create-update"
Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.752914 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1d37e1b-c28c-4fca-823b-3eb4c6e0364b" containerName="mariadb-database-create"
Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.752929 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd5228b4-8e27-416a-8260-1b6d12b439a1" containerName="mariadb-account-create-update"
Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.752944 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ccddf6f-bdaf-40aa-9c6f-56e695369eeb" containerName="mariadb-database-create"
Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.752956 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="05549d64-5e5c-4833-9180-b902c9563863" containerName="mariadb-database-create"
Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.752965 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a1f428c-a71f-4cfa-8d34-178584bad9a5" containerName="mariadb-account-create-update"
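
The cpu_manager, state_mem and memory_manager entries above are stale-state cleanup: when a new pod (glance-db-sync-fdxn5) is admitted, the resource managers first drop per-container assignments belonging to pods that no longer exist, here the finished db-create and account-create-update jobs. A minimal sketch with illustrative types, not the kubelet's own checkpoint structures:

package main

import "fmt"

// Illustrative key for a per-pod, per-container resource assignment.
type key struct{ podUID, container string }

// removeStaleState drops assignments for containers whose pods are no
// longer active, the cleanup logged by cpu_manager.go:410,
// state_mem.go:107 and memory_manager.go:354 above.
func removeStaleState(assignments map[key]string, activePods map[string]bool) {
	for k := range assignments {
		if !activePods[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n",
				k.podUID, k.container)
			delete(assignments, k) // deleting during range is safe in Go
		}
	}
}

func main() {
	assignments := map[key]string{
		{podUID: "dc8666d1-433d-4bbe-8079-1675a028f85f", container: "mariadb-account-create-update"}: "cpuset 0-3",
	}
	// The completed create/update jobs are gone, so nothing is active.
	removeStaleState(assignments, map[string]bool{})
	fmt.Println("remaining assignments:", len(assignments)) // 0
}
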
Need to start a new one" pod="openstack/glance-db-sync-fdxn5" Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.755813 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-2l6wf" Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.756594 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.760754 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-fdxn5"] Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.889375 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02b6b0aa-4f58-4de4-83b7-c3291e005325-combined-ca-bundle\") pod \"glance-db-sync-fdxn5\" (UID: \"02b6b0aa-4f58-4de4-83b7-c3291e005325\") " pod="openstack/glance-db-sync-fdxn5" Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.889663 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5mtg\" (UniqueName: \"kubernetes.io/projected/02b6b0aa-4f58-4de4-83b7-c3291e005325-kube-api-access-v5mtg\") pod \"glance-db-sync-fdxn5\" (UID: \"02b6b0aa-4f58-4de4-83b7-c3291e005325\") " pod="openstack/glance-db-sync-fdxn5" Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.889838 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02b6b0aa-4f58-4de4-83b7-c3291e005325-config-data\") pod \"glance-db-sync-fdxn5\" (UID: \"02b6b0aa-4f58-4de4-83b7-c3291e005325\") " pod="openstack/glance-db-sync-fdxn5" Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.889882 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/02b6b0aa-4f58-4de4-83b7-c3291e005325-db-sync-config-data\") pod \"glance-db-sync-fdxn5\" (UID: \"02b6b0aa-4f58-4de4-83b7-c3291e005325\") " pod="openstack/glance-db-sync-fdxn5" Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.991869 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5mtg\" (UniqueName: \"kubernetes.io/projected/02b6b0aa-4f58-4de4-83b7-c3291e005325-kube-api-access-v5mtg\") pod \"glance-db-sync-fdxn5\" (UID: \"02b6b0aa-4f58-4de4-83b7-c3291e005325\") " pod="openstack/glance-db-sync-fdxn5" Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.991944 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02b6b0aa-4f58-4de4-83b7-c3291e005325-config-data\") pod \"glance-db-sync-fdxn5\" (UID: \"02b6b0aa-4f58-4de4-83b7-c3291e005325\") " pod="openstack/glance-db-sync-fdxn5" Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.991972 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/02b6b0aa-4f58-4de4-83b7-c3291e005325-db-sync-config-data\") pod \"glance-db-sync-fdxn5\" (UID: \"02b6b0aa-4f58-4de4-83b7-c3291e005325\") " pod="openstack/glance-db-sync-fdxn5" Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.992022 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02b6b0aa-4f58-4de4-83b7-c3291e005325-combined-ca-bundle\") pod 
\"glance-db-sync-fdxn5\" (UID: \"02b6b0aa-4f58-4de4-83b7-c3291e005325\") " pod="openstack/glance-db-sync-fdxn5" Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.998434 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02b6b0aa-4f58-4de4-83b7-c3291e005325-combined-ca-bundle\") pod \"glance-db-sync-fdxn5\" (UID: \"02b6b0aa-4f58-4de4-83b7-c3291e005325\") " pod="openstack/glance-db-sync-fdxn5" Feb 02 22:53:00 crc kubenswrapper[4755]: I0202 22:53:00.999695 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02b6b0aa-4f58-4de4-83b7-c3291e005325-config-data\") pod \"glance-db-sync-fdxn5\" (UID: \"02b6b0aa-4f58-4de4-83b7-c3291e005325\") " pod="openstack/glance-db-sync-fdxn5" Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.001193 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/02b6b0aa-4f58-4de4-83b7-c3291e005325-db-sync-config-data\") pod \"glance-db-sync-fdxn5\" (UID: \"02b6b0aa-4f58-4de4-83b7-c3291e005325\") " pod="openstack/glance-db-sync-fdxn5" Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.021721 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5mtg\" (UniqueName: \"kubernetes.io/projected/02b6b0aa-4f58-4de4-83b7-c3291e005325-kube-api-access-v5mtg\") pod \"glance-db-sync-fdxn5\" (UID: \"02b6b0aa-4f58-4de4-83b7-c3291e005325\") " pod="openstack/glance-db-sync-fdxn5" Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.087583 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-fdxn5" Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.513869 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-g6jdb" Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.607649 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/980ec437-3885-426d-9b2c-1773951f8c86-ring-data-devices\") pod \"980ec437-3885-426d-9b2c-1773951f8c86\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.607703 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nx5xv\" (UniqueName: \"kubernetes.io/projected/980ec437-3885-426d-9b2c-1773951f8c86-kube-api-access-nx5xv\") pod \"980ec437-3885-426d-9b2c-1773951f8c86\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.607786 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/980ec437-3885-426d-9b2c-1773951f8c86-etc-swift\") pod \"980ec437-3885-426d-9b2c-1773951f8c86\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.607843 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/980ec437-3885-426d-9b2c-1773951f8c86-swiftconf\") pod \"980ec437-3885-426d-9b2c-1773951f8c86\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.607890 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/980ec437-3885-426d-9b2c-1773951f8c86-scripts\") pod \"980ec437-3885-426d-9b2c-1773951f8c86\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.607954 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/980ec437-3885-426d-9b2c-1773951f8c86-combined-ca-bundle\") pod \"980ec437-3885-426d-9b2c-1773951f8c86\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.607998 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/980ec437-3885-426d-9b2c-1773951f8c86-dispersionconf\") pod \"980ec437-3885-426d-9b2c-1773951f8c86\" (UID: \"980ec437-3885-426d-9b2c-1773951f8c86\") " Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.608881 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/980ec437-3885-426d-9b2c-1773951f8c86-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "980ec437-3885-426d-9b2c-1773951f8c86" (UID: "980ec437-3885-426d-9b2c-1773951f8c86"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.609782 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/980ec437-3885-426d-9b2c-1773951f8c86-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "980ec437-3885-426d-9b2c-1773951f8c86" (UID: "980ec437-3885-426d-9b2c-1773951f8c86"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.612963 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/980ec437-3885-426d-9b2c-1773951f8c86-kube-api-access-nx5xv" (OuterVolumeSpecName: "kube-api-access-nx5xv") pod "980ec437-3885-426d-9b2c-1773951f8c86" (UID: "980ec437-3885-426d-9b2c-1773951f8c86"). InnerVolumeSpecName "kube-api-access-nx5xv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.621048 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/980ec437-3885-426d-9b2c-1773951f8c86-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "980ec437-3885-426d-9b2c-1773951f8c86" (UID: "980ec437-3885-426d-9b2c-1773951f8c86"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.629522 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/980ec437-3885-426d-9b2c-1773951f8c86-scripts" (OuterVolumeSpecName: "scripts") pod "980ec437-3885-426d-9b2c-1773951f8c86" (UID: "980ec437-3885-426d-9b2c-1773951f8c86"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.635008 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/980ec437-3885-426d-9b2c-1773951f8c86-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "980ec437-3885-426d-9b2c-1773951f8c86" (UID: "980ec437-3885-426d-9b2c-1773951f8c86"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.648518 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/980ec437-3885-426d-9b2c-1773951f8c86-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "980ec437-3885-426d-9b2c-1773951f8c86" (UID: "980ec437-3885-426d-9b2c-1773951f8c86"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.710150 4755 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/980ec437-3885-426d-9b2c-1773951f8c86-swiftconf\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.710181 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/980ec437-3885-426d-9b2c-1773951f8c86-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.710192 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/980ec437-3885-426d-9b2c-1773951f8c86-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.710203 4755 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/980ec437-3885-426d-9b2c-1773951f8c86-dispersionconf\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.710212 4755 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/980ec437-3885-426d-9b2c-1773951f8c86-ring-data-devices\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.710221 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nx5xv\" (UniqueName: \"kubernetes.io/projected/980ec437-3885-426d-9b2c-1773951f8c86-kube-api-access-nx5xv\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.710232 4755 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/980ec437-3885-426d-9b2c-1773951f8c86-etc-swift\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:01 crc kubenswrapper[4755]: W0202 22:53:01.747027 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02b6b0aa_4f58_4de4_83b7_c3291e005325.slice/crio-0928139590d028ae619b01c1d4c69ce94ae09b0314491098bde10a75e14772a7 WatchSource:0}: Error finding container 0928139590d028ae619b01c1d4c69ce94ae09b0314491098bde10a75e14772a7: Status 404 returned error can't find the container with id 0928139590d028ae619b01c1d4c69ce94ae09b0314491098bde10a75e14772a7 Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.748563 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-fdxn5"] Feb 02 22:53:01 crc kubenswrapper[4755]: I0202 22:53:01.822570 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Feb 02 22:53:02 crc kubenswrapper[4755]: I0202 22:53:02.160821 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-g6jdb" Feb 02 22:53:02 crc kubenswrapper[4755]: I0202 22:53:02.161056 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-g6jdb" event={"ID":"980ec437-3885-426d-9b2c-1773951f8c86","Type":"ContainerDied","Data":"d2df0a35675cbd662eee647ae1c2373488f8a55d45376062f5aa81d838010577"} Feb 02 22:53:02 crc kubenswrapper[4755]: I0202 22:53:02.161122 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d2df0a35675cbd662eee647ae1c2373488f8a55d45376062f5aa81d838010577" Feb 02 22:53:02 crc kubenswrapper[4755]: I0202 22:53:02.164223 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-fdxn5" event={"ID":"02b6b0aa-4f58-4de4-83b7-c3291e005325","Type":"ContainerStarted","Data":"0928139590d028ae619b01c1d4c69ce94ae09b0314491098bde10a75e14772a7"} Feb 02 22:53:02 crc kubenswrapper[4755]: I0202 22:53:02.244521 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-qrxtc"] Feb 02 22:53:02 crc kubenswrapper[4755]: I0202 22:53:02.253988 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-qrxtc"] Feb 02 22:53:03 crc kubenswrapper[4755]: I0202 22:53:03.084075 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd5228b4-8e27-416a-8260-1b6d12b439a1" path="/var/lib/kubelet/pods/dd5228b4-8e27-416a-8260-1b6d12b439a1/volumes" Feb 02 22:53:03 crc kubenswrapper[4755]: I0202 22:53:03.144368 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:03 crc kubenswrapper[4755]: I0202 22:53:03.149280 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:03 crc kubenswrapper[4755]: I0202 22:53:03.183149 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:03 crc kubenswrapper[4755]: I0202 22:53:03.373388 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="5717cdb5-a227-4975-b808-068f0ace63c5" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Feb 02 22:53:05 crc kubenswrapper[4755]: I0202 22:53:05.718742 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-jdd7m" podUID="06016f18-0f29-4d82-aa08-233d91c9a744" containerName="ovn-controller" probeResult="failure" output=< Feb 02 22:53:05 crc kubenswrapper[4755]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Feb 02 22:53:05 crc kubenswrapper[4755]: > Feb 02 22:53:05 crc kubenswrapper[4755]: I0202 22:53:05.734485 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:53:05 crc kubenswrapper[4755]: I0202 22:53:05.735989 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-bzn9p" Feb 02 22:53:05 crc kubenswrapper[4755]: I0202 22:53:05.963047 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-jdd7m-config-zqpxg"] Feb 02 22:53:05 crc kubenswrapper[4755]: E0202 22:53:05.963476 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980ec437-3885-426d-9b2c-1773951f8c86" containerName="swift-ring-rebalance" Feb 02 22:53:05 crc 
Feb 02 22:53:05 crc kubenswrapper[4755]: I0202 22:53:05.963499 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="980ec437-3885-426d-9b2c-1773951f8c86" containerName="swift-ring-rebalance"
Feb 02 22:53:05 crc kubenswrapper[4755]: I0202 22:53:05.963722 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="980ec437-3885-426d-9b2c-1773951f8c86" containerName="swift-ring-rebalance"
Feb 02 22:53:05 crc kubenswrapper[4755]: I0202 22:53:05.964426 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jdd7m-config-zqpxg"
Feb 02 22:53:05 crc kubenswrapper[4755]: I0202 22:53:05.967373 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts"
Feb 02 22:53:05 crc kubenswrapper[4755]: I0202 22:53:05.983987 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jdd7m-config-zqpxg"]
Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.110272 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/968f2997-e427-4d75-9bce-ee87f49b381d-var-run-ovn\") pod \"ovn-controller-jdd7m-config-zqpxg\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " pod="openstack/ovn-controller-jdd7m-config-zqpxg"
Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.110352 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/968f2997-e427-4d75-9bce-ee87f49b381d-var-run\") pod \"ovn-controller-jdd7m-config-zqpxg\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " pod="openstack/ovn-controller-jdd7m-config-zqpxg"
Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.110603 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/968f2997-e427-4d75-9bce-ee87f49b381d-var-log-ovn\") pod \"ovn-controller-jdd7m-config-zqpxg\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " pod="openstack/ovn-controller-jdd7m-config-zqpxg"
Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.110655 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vgvc\" (UniqueName: \"kubernetes.io/projected/968f2997-e427-4d75-9bce-ee87f49b381d-kube-api-access-4vgvc\") pod \"ovn-controller-jdd7m-config-zqpxg\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " pod="openstack/ovn-controller-jdd7m-config-zqpxg"
Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.110672 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/968f2997-e427-4d75-9bce-ee87f49b381d-scripts\") pod \"ovn-controller-jdd7m-config-zqpxg\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " pod="openstack/ovn-controller-jdd7m-config-zqpxg"
Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.110791 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/968f2997-e427-4d75-9bce-ee87f49b381d-additional-scripts\") pod \"ovn-controller-jdd7m-config-zqpxg\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " pod="openstack/ovn-controller-jdd7m-config-zqpxg"
Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.212495 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/968f2997-e427-4d75-9bce-ee87f49b381d-var-run-ovn\") pod \"ovn-controller-jdd7m-config-zqpxg\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " pod="openstack/ovn-controller-jdd7m-config-zqpxg" Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.212601 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/968f2997-e427-4d75-9bce-ee87f49b381d-var-run\") pod \"ovn-controller-jdd7m-config-zqpxg\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " pod="openstack/ovn-controller-jdd7m-config-zqpxg" Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.212738 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/968f2997-e427-4d75-9bce-ee87f49b381d-var-log-ovn\") pod \"ovn-controller-jdd7m-config-zqpxg\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " pod="openstack/ovn-controller-jdd7m-config-zqpxg" Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.212766 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vgvc\" (UniqueName: \"kubernetes.io/projected/968f2997-e427-4d75-9bce-ee87f49b381d-kube-api-access-4vgvc\") pod \"ovn-controller-jdd7m-config-zqpxg\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " pod="openstack/ovn-controller-jdd7m-config-zqpxg" Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.212808 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/968f2997-e427-4d75-9bce-ee87f49b381d-scripts\") pod \"ovn-controller-jdd7m-config-zqpxg\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " pod="openstack/ovn-controller-jdd7m-config-zqpxg" Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.212895 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/968f2997-e427-4d75-9bce-ee87f49b381d-additional-scripts\") pod \"ovn-controller-jdd7m-config-zqpxg\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " pod="openstack/ovn-controller-jdd7m-config-zqpxg" Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.213036 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/968f2997-e427-4d75-9bce-ee87f49b381d-var-run-ovn\") pod \"ovn-controller-jdd7m-config-zqpxg\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " pod="openstack/ovn-controller-jdd7m-config-zqpxg" Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.213172 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/968f2997-e427-4d75-9bce-ee87f49b381d-var-log-ovn\") pod \"ovn-controller-jdd7m-config-zqpxg\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " pod="openstack/ovn-controller-jdd7m-config-zqpxg" Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.213543 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/968f2997-e427-4d75-9bce-ee87f49b381d-var-run\") pod \"ovn-controller-jdd7m-config-zqpxg\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " pod="openstack/ovn-controller-jdd7m-config-zqpxg" Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.214040 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: 
\"kubernetes.io/configmap/968f2997-e427-4d75-9bce-ee87f49b381d-additional-scripts\") pod \"ovn-controller-jdd7m-config-zqpxg\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " pod="openstack/ovn-controller-jdd7m-config-zqpxg" Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.225416 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/968f2997-e427-4d75-9bce-ee87f49b381d-scripts\") pod \"ovn-controller-jdd7m-config-zqpxg\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " pod="openstack/ovn-controller-jdd7m-config-zqpxg" Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.233593 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vgvc\" (UniqueName: \"kubernetes.io/projected/968f2997-e427-4d75-9bce-ee87f49b381d-kube-api-access-4vgvc\") pod \"ovn-controller-jdd7m-config-zqpxg\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " pod="openstack/ovn-controller-jdd7m-config-zqpxg" Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.284438 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jdd7m-config-zqpxg" Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.717816 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.718287 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" containerName="prometheus" containerID="cri-o://e0dccd2ede44321dc0c2f4d658b5bed9355bae202ba13b3d4afc97e82c8d08fa" gracePeriod=600 Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.718403 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" containerName="thanos-sidecar" containerID="cri-o://2dba2f60b197315b9c9aeecc54c2dfba32b69489d7b7279575c7b88354eafc52" gracePeriod=600 Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.718443 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" containerName="config-reloader" containerID="cri-o://4184e09c3258912403f80e83b518732aa855d8b2b2381ba02eb5a280b06c9b83" gracePeriod=600 Feb 02 22:53:06 crc kubenswrapper[4755]: I0202 22:53:06.761524 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jdd7m-config-zqpxg"] Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.217131 4755 generic.go:334] "Generic (PLEG): container finished" podID="dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" containerID="2dba2f60b197315b9c9aeecc54c2dfba32b69489d7b7279575c7b88354eafc52" exitCode=0 Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.217482 4755 generic.go:334] "Generic (PLEG): container finished" podID="dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" containerID="4184e09c3258912403f80e83b518732aa855d8b2b2381ba02eb5a280b06c9b83" exitCode=0 Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.217498 4755 generic.go:334] "Generic (PLEG): container finished" podID="dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" containerID="e0dccd2ede44321dc0c2f4d658b5bed9355bae202ba13b3d4afc97e82c8d08fa" exitCode=0 Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.217551 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" 
event={"ID":"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc","Type":"ContainerDied","Data":"2dba2f60b197315b9c9aeecc54c2dfba32b69489d7b7279575c7b88354eafc52"} Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.217587 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc","Type":"ContainerDied","Data":"4184e09c3258912403f80e83b518732aa855d8b2b2381ba02eb5a280b06c9b83"} Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.217605 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc","Type":"ContainerDied","Data":"e0dccd2ede44321dc0c2f4d658b5bed9355bae202ba13b3d4afc97e82c8d08fa"} Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.219383 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jdd7m-config-zqpxg" event={"ID":"968f2997-e427-4d75-9bce-ee87f49b381d","Type":"ContainerStarted","Data":"899197e3c492a122a57091f6f598739acb6b588f83d939fe40d80667db363b0c"} Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.219413 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jdd7m-config-zqpxg" event={"ID":"968f2997-e427-4d75-9bce-ee87f49b381d","Type":"ContainerStarted","Data":"03b253185bbdeae6d6087631a854282cd32f85c45c96f724cc9614caf23e193a"} Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.236744 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-jdd7m-config-zqpxg" podStartSLOduration=2.236704702 podStartE2EDuration="2.236704702s" podCreationTimestamp="2026-02-02 22:53:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:53:07.235065247 +0000 UTC m=+1142.926285583" watchObservedRunningTime="2026-02-02 22:53:07.236704702 +0000 UTC m=+1142.927925038" Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.276421 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-l5tks"] Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.277798 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-l5tks" Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.288552 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.301887 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-l5tks"] Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.331390 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76950441-48ad-4562-bbf6-15fd01c6d2d4-operator-scripts\") pod \"root-account-create-update-l5tks\" (UID: \"76950441-48ad-4562-bbf6-15fd01c6d2d4\") " pod="openstack/root-account-create-update-l5tks" Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.331435 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2b6v\" (UniqueName: \"kubernetes.io/projected/76950441-48ad-4562-bbf6-15fd01c6d2d4-kube-api-access-x2b6v\") pod \"root-account-create-update-l5tks\" (UID: \"76950441-48ad-4562-bbf6-15fd01c6d2d4\") " pod="openstack/root-account-create-update-l5tks" Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.434302 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76950441-48ad-4562-bbf6-15fd01c6d2d4-operator-scripts\") pod \"root-account-create-update-l5tks\" (UID: \"76950441-48ad-4562-bbf6-15fd01c6d2d4\") " pod="openstack/root-account-create-update-l5tks" Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.434370 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2b6v\" (UniqueName: \"kubernetes.io/projected/76950441-48ad-4562-bbf6-15fd01c6d2d4-kube-api-access-x2b6v\") pod \"root-account-create-update-l5tks\" (UID: \"76950441-48ad-4562-bbf6-15fd01c6d2d4\") " pod="openstack/root-account-create-update-l5tks" Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.435265 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76950441-48ad-4562-bbf6-15fd01c6d2d4-operator-scripts\") pod \"root-account-create-update-l5tks\" (UID: \"76950441-48ad-4562-bbf6-15fd01c6d2d4\") " pod="openstack/root-account-create-update-l5tks" Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.454543 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2b6v\" (UniqueName: \"kubernetes.io/projected/76950441-48ad-4562-bbf6-15fd01c6d2d4-kube-api-access-x2b6v\") pod \"root-account-create-update-l5tks\" (UID: \"76950441-48ad-4562-bbf6-15fd01c6d2d4\") " pod="openstack/root-account-create-update-l5tks" Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.503244 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.610101 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-l5tks" Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.870247 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.956492 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-ftxm7"] Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.957623 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-ftxm7" Feb 02 22:53:07 crc kubenswrapper[4755]: I0202 22:53:07.966949 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-ftxm7"] Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.045428 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71e11ad7-2fb2-4059-997a-107df77d50aa-operator-scripts\") pod \"cinder-db-create-ftxm7\" (UID: \"71e11ad7-2fb2-4059-997a-107df77d50aa\") " pod="openstack/cinder-db-create-ftxm7" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.045537 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dtq8\" (UniqueName: \"kubernetes.io/projected/71e11ad7-2fb2-4059-997a-107df77d50aa-kube-api-access-6dtq8\") pod \"cinder-db-create-ftxm7\" (UID: \"71e11ad7-2fb2-4059-997a-107df77d50aa\") " pod="openstack/cinder-db-create-ftxm7" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.054689 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-8j88c"] Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.055958 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-8j88c" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.075449 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-8j88c"] Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.136940 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-8750-account-create-update-hh7qm"] Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.138125 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-8750-account-create-update-hh7qm" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.141126 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.144677 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-8750-account-create-update-hh7qm"] Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.144751 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/prometheus-metric-storage-0" podUID="dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" containerName="prometheus" probeResult="failure" output="Get \"http://10.217.0.114:9090/-/ready\": dial tcp 10.217.0.114:9090: connect: connection refused" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.147064 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/222237ea-cd02-4919-b29c-9770862a1d51-operator-scripts\") pod \"barbican-db-create-8j88c\" (UID: \"222237ea-cd02-4919-b29c-9770862a1d51\") " pod="openstack/barbican-db-create-8j88c" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.147115 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71e11ad7-2fb2-4059-997a-107df77d50aa-operator-scripts\") pod \"cinder-db-create-ftxm7\" (UID: \"71e11ad7-2fb2-4059-997a-107df77d50aa\") " pod="openstack/cinder-db-create-ftxm7" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.147194 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dtq8\" (UniqueName: \"kubernetes.io/projected/71e11ad7-2fb2-4059-997a-107df77d50aa-kube-api-access-6dtq8\") pod \"cinder-db-create-ftxm7\" (UID: \"71e11ad7-2fb2-4059-997a-107df77d50aa\") " pod="openstack/cinder-db-create-ftxm7" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.147250 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clpxz\" (UniqueName: \"kubernetes.io/projected/222237ea-cd02-4919-b29c-9770862a1d51-kube-api-access-clpxz\") pod \"barbican-db-create-8j88c\" (UID: \"222237ea-cd02-4919-b29c-9770862a1d51\") " pod="openstack/barbican-db-create-8j88c" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.148663 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71e11ad7-2fb2-4059-997a-107df77d50aa-operator-scripts\") pod \"cinder-db-create-ftxm7\" (UID: \"71e11ad7-2fb2-4059-997a-107df77d50aa\") " pod="openstack/cinder-db-create-ftxm7" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.174445 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dtq8\" (UniqueName: \"kubernetes.io/projected/71e11ad7-2fb2-4059-997a-107df77d50aa-kube-api-access-6dtq8\") pod \"cinder-db-create-ftxm7\" (UID: \"71e11ad7-2fb2-4059-997a-107df77d50aa\") " pod="openstack/cinder-db-create-ftxm7" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.236543 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-6a80-account-create-update-hngf6"] Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.237840 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-6a80-account-create-update-hngf6" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.248384 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.249465 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e05b09d-5b2a-4b6a-8ff9-dbbe19018706-operator-scripts\") pod \"cinder-8750-account-create-update-hh7qm\" (UID: \"7e05b09d-5b2a-4b6a-8ff9-dbbe19018706\") " pod="openstack/cinder-8750-account-create-update-hh7qm" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.249498 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clpxz\" (UniqueName: \"kubernetes.io/projected/222237ea-cd02-4919-b29c-9770862a1d51-kube-api-access-clpxz\") pod \"barbican-db-create-8j88c\" (UID: \"222237ea-cd02-4919-b29c-9770862a1d51\") " pod="openstack/barbican-db-create-8j88c" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.249562 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/222237ea-cd02-4919-b29c-9770862a1d51-operator-scripts\") pod \"barbican-db-create-8j88c\" (UID: \"222237ea-cd02-4919-b29c-9770862a1d51\") " pod="openstack/barbican-db-create-8j88c" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.249640 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rl46f\" (UniqueName: \"kubernetes.io/projected/7e05b09d-5b2a-4b6a-8ff9-dbbe19018706-kube-api-access-rl46f\") pod \"cinder-8750-account-create-update-hh7qm\" (UID: \"7e05b09d-5b2a-4b6a-8ff9-dbbe19018706\") " pod="openstack/cinder-8750-account-create-update-hh7qm" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.250711 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/222237ea-cd02-4919-b29c-9770862a1d51-operator-scripts\") pod \"barbican-db-create-8j88c\" (UID: \"222237ea-cd02-4919-b29c-9770862a1d51\") " pod="openstack/barbican-db-create-8j88c" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.258027 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-6a80-account-create-update-hngf6"] Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.258507 4755 generic.go:334] "Generic (PLEG): container finished" podID="968f2997-e427-4d75-9bce-ee87f49b381d" containerID="899197e3c492a122a57091f6f598739acb6b588f83d939fe40d80667db363b0c" exitCode=0 Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.258545 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jdd7m-config-zqpxg" event={"ID":"968f2997-e427-4d75-9bce-ee87f49b381d","Type":"ContainerDied","Data":"899197e3c492a122a57091f6f598739acb6b588f83d939fe40d80667db363b0c"} Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.286958 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clpxz\" (UniqueName: \"kubernetes.io/projected/222237ea-cd02-4919-b29c-9770862a1d51-kube-api-access-clpxz\") pod \"barbican-db-create-8j88c\" (UID: \"222237ea-cd02-4919-b29c-9770862a1d51\") " pod="openstack/barbican-db-create-8j88c" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.298722 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-ftxm7" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.332554 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-db-create-d4c24"] Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.334855 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-d4c24" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.348971 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-create-d4c24"] Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.351036 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f744w\" (UniqueName: \"kubernetes.io/projected/82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd-kube-api-access-f744w\") pod \"barbican-6a80-account-create-update-hngf6\" (UID: \"82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd\") " pod="openstack/barbican-6a80-account-create-update-hngf6" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.351169 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd-operator-scripts\") pod \"barbican-6a80-account-create-update-hngf6\" (UID: \"82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd\") " pod="openstack/barbican-6a80-account-create-update-hngf6" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.351206 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rl46f\" (UniqueName: \"kubernetes.io/projected/7e05b09d-5b2a-4b6a-8ff9-dbbe19018706-kube-api-access-rl46f\") pod \"cinder-8750-account-create-update-hh7qm\" (UID: \"7e05b09d-5b2a-4b6a-8ff9-dbbe19018706\") " pod="openstack/cinder-8750-account-create-update-hh7qm" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.351284 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e05b09d-5b2a-4b6a-8ff9-dbbe19018706-operator-scripts\") pod \"cinder-8750-account-create-update-hh7qm\" (UID: \"7e05b09d-5b2a-4b6a-8ff9-dbbe19018706\") " pod="openstack/cinder-8750-account-create-update-hh7qm" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.362991 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e05b09d-5b2a-4b6a-8ff9-dbbe19018706-operator-scripts\") pod \"cinder-8750-account-create-update-hh7qm\" (UID: \"7e05b09d-5b2a-4b6a-8ff9-dbbe19018706\") " pod="openstack/cinder-8750-account-create-update-hh7qm" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.380501 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-8j88c" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.384866 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rl46f\" (UniqueName: \"kubernetes.io/projected/7e05b09d-5b2a-4b6a-8ff9-dbbe19018706-kube-api-access-rl46f\") pod \"cinder-8750-account-create-update-hh7qm\" (UID: \"7e05b09d-5b2a-4b6a-8ff9-dbbe19018706\") " pod="openstack/cinder-8750-account-create-update-hh7qm" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.403101 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-x247p"] Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.404884 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-x247p" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.408783 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.408972 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-pvh95" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.409132 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.409784 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.423246 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-x247p"] Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.450439 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-ed0a-account-create-update-5rdjc"] Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.451798 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-ed0a-account-create-update-5rdjc" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.455802 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.461087 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-8750-account-create-update-hh7qm" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.467301 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47165bb1-af4a-4e73-957e-8f0845a29841-combined-ca-bundle\") pod \"keystone-db-sync-x247p\" (UID: \"47165bb1-af4a-4e73-957e-8f0845a29841\") " pod="openstack/keystone-db-sync-x247p" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.467403 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47165bb1-af4a-4e73-957e-8f0845a29841-config-data\") pod \"keystone-db-sync-x247p\" (UID: \"47165bb1-af4a-4e73-957e-8f0845a29841\") " pod="openstack/keystone-db-sync-x247p" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.467430 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f744w\" (UniqueName: \"kubernetes.io/projected/82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd-kube-api-access-f744w\") pod \"barbican-6a80-account-create-update-hngf6\" (UID: \"82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd\") " pod="openstack/barbican-6a80-account-create-update-hngf6" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.467461 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ps2vh\" (UniqueName: \"kubernetes.io/projected/0591104a-5134-4402-aa0f-4b94a79aa5df-kube-api-access-ps2vh\") pod \"cloudkitty-db-create-d4c24\" (UID: \"0591104a-5134-4402-aa0f-4b94a79aa5df\") " pod="openstack/cloudkitty-db-create-d4c24" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.467526 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd-operator-scripts\") pod \"barbican-6a80-account-create-update-hngf6\" (UID: \"82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd\") " pod="openstack/barbican-6a80-account-create-update-hngf6" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.467557 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0591104a-5134-4402-aa0f-4b94a79aa5df-operator-scripts\") pod \"cloudkitty-db-create-d4c24\" (UID: \"0591104a-5134-4402-aa0f-4b94a79aa5df\") " pod="openstack/cloudkitty-db-create-d4c24" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.467579 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkt5k\" (UniqueName: \"kubernetes.io/projected/47165bb1-af4a-4e73-957e-8f0845a29841-kube-api-access-kkt5k\") pod \"keystone-db-sync-x247p\" (UID: \"47165bb1-af4a-4e73-957e-8f0845a29841\") " pod="openstack/keystone-db-sync-x247p" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.470444 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd-operator-scripts\") pod \"barbican-6a80-account-create-update-hngf6\" (UID: \"82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd\") " pod="openstack/barbican-6a80-account-create-update-hngf6" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.479276 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-ed0a-account-create-update-5rdjc"] Feb 02 
22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.489365 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-8flnx"] Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.490480 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-8flnx" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.491683 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f744w\" (UniqueName: \"kubernetes.io/projected/82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd-kube-api-access-f744w\") pod \"barbican-6a80-account-create-update-hngf6\" (UID: \"82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd\") " pod="openstack/barbican-6a80-account-create-update-hngf6" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.549342 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-8flnx"] Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.559859 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6a80-account-create-update-hngf6" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.568779 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0591104a-5134-4402-aa0f-4b94a79aa5df-operator-scripts\") pod \"cloudkitty-db-create-d4c24\" (UID: \"0591104a-5134-4402-aa0f-4b94a79aa5df\") " pod="openstack/cloudkitty-db-create-d4c24" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.568823 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkt5k\" (UniqueName: \"kubernetes.io/projected/47165bb1-af4a-4e73-957e-8f0845a29841-kube-api-access-kkt5k\") pod \"keystone-db-sync-x247p\" (UID: \"47165bb1-af4a-4e73-957e-8f0845a29841\") " pod="openstack/keystone-db-sync-x247p" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.568864 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfz82\" (UniqueName: \"kubernetes.io/projected/0329c472-ea03-47c4-a6e7-ae1f89169e6b-kube-api-access-vfz82\") pod \"neutron-db-create-8flnx\" (UID: \"0329c472-ea03-47c4-a6e7-ae1f89169e6b\") " pod="openstack/neutron-db-create-8flnx" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.568890 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0329c472-ea03-47c4-a6e7-ae1f89169e6b-operator-scripts\") pod \"neutron-db-create-8flnx\" (UID: \"0329c472-ea03-47c4-a6e7-ae1f89169e6b\") " pod="openstack/neutron-db-create-8flnx" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.568913 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47165bb1-af4a-4e73-957e-8f0845a29841-combined-ca-bundle\") pod \"keystone-db-sync-x247p\" (UID: \"47165bb1-af4a-4e73-957e-8f0845a29841\") " pod="openstack/keystone-db-sync-x247p" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.568984 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bbc7c682-a173-43e9-bca1-404c12b4b333-operator-scripts\") pod \"neutron-ed0a-account-create-update-5rdjc\" (UID: \"bbc7c682-a173-43e9-bca1-404c12b4b333\") " pod="openstack/neutron-ed0a-account-create-update-5rdjc" Feb 02 22:53:08 crc
kubenswrapper[4755]: I0202 22:53:08.569017 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47165bb1-af4a-4e73-957e-8f0845a29841-config-data\") pod \"keystone-db-sync-x247p\" (UID: \"47165bb1-af4a-4e73-957e-8f0845a29841\") " pod="openstack/keystone-db-sync-x247p" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.569044 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l62q6\" (UniqueName: \"kubernetes.io/projected/bbc7c682-a173-43e9-bca1-404c12b4b333-kube-api-access-l62q6\") pod \"neutron-ed0a-account-create-update-5rdjc\" (UID: \"bbc7c682-a173-43e9-bca1-404c12b4b333\") " pod="openstack/neutron-ed0a-account-create-update-5rdjc" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.569068 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ps2vh\" (UniqueName: \"kubernetes.io/projected/0591104a-5134-4402-aa0f-4b94a79aa5df-kube-api-access-ps2vh\") pod \"cloudkitty-db-create-d4c24\" (UID: \"0591104a-5134-4402-aa0f-4b94a79aa5df\") " pod="openstack/cloudkitty-db-create-d4c24" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.569369 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0591104a-5134-4402-aa0f-4b94a79aa5df-operator-scripts\") pod \"cloudkitty-db-create-d4c24\" (UID: \"0591104a-5134-4402-aa0f-4b94a79aa5df\") " pod="openstack/cloudkitty-db-create-d4c24" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.573084 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47165bb1-af4a-4e73-957e-8f0845a29841-combined-ca-bundle\") pod \"keystone-db-sync-x247p\" (UID: \"47165bb1-af4a-4e73-957e-8f0845a29841\") " pod="openstack/keystone-db-sync-x247p" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.584469 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47165bb1-af4a-4e73-957e-8f0845a29841-config-data\") pod \"keystone-db-sync-x247p\" (UID: \"47165bb1-af4a-4e73-957e-8f0845a29841\") " pod="openstack/keystone-db-sync-x247p" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.588064 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ps2vh\" (UniqueName: \"kubernetes.io/projected/0591104a-5134-4402-aa0f-4b94a79aa5df-kube-api-access-ps2vh\") pod \"cloudkitty-db-create-d4c24\" (UID: \"0591104a-5134-4402-aa0f-4b94a79aa5df\") " pod="openstack/cloudkitty-db-create-d4c24" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.588626 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkt5k\" (UniqueName: \"kubernetes.io/projected/47165bb1-af4a-4e73-957e-8f0845a29841-kube-api-access-kkt5k\") pod \"keystone-db-sync-x247p\" (UID: \"47165bb1-af4a-4e73-957e-8f0845a29841\") " pod="openstack/keystone-db-sync-x247p" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.673294 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-create-d4c24" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.673898 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfz82\" (UniqueName: \"kubernetes.io/projected/0329c472-ea03-47c4-a6e7-ae1f89169e6b-kube-api-access-vfz82\") pod \"neutron-db-create-8flnx\" (UID: \"0329c472-ea03-47c4-a6e7-ae1f89169e6b\") " pod="openstack/neutron-db-create-8flnx" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.673948 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0329c472-ea03-47c4-a6e7-ae1f89169e6b-operator-scripts\") pod \"neutron-db-create-8flnx\" (UID: \"0329c472-ea03-47c4-a6e7-ae1f89169e6b\") " pod="openstack/neutron-db-create-8flnx" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.674010 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bbc7c682-a173-43e9-bca1-404c12b4b333-operator-scripts\") pod \"neutron-ed0a-account-create-update-5rdjc\" (UID: \"bbc7c682-a173-43e9-bca1-404c12b4b333\") " pod="openstack/neutron-ed0a-account-create-update-5rdjc" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.674055 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l62q6\" (UniqueName: \"kubernetes.io/projected/bbc7c682-a173-43e9-bca1-404c12b4b333-kube-api-access-l62q6\") pod \"neutron-ed0a-account-create-update-5rdjc\" (UID: \"bbc7c682-a173-43e9-bca1-404c12b4b333\") " pod="openstack/neutron-ed0a-account-create-update-5rdjc" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.675132 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bbc7c682-a173-43e9-bca1-404c12b4b333-operator-scripts\") pod \"neutron-ed0a-account-create-update-5rdjc\" (UID: \"bbc7c682-a173-43e9-bca1-404c12b4b333\") " pod="openstack/neutron-ed0a-account-create-update-5rdjc" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.675152 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0329c472-ea03-47c4-a6e7-ae1f89169e6b-operator-scripts\") pod \"neutron-db-create-8flnx\" (UID: \"0329c472-ea03-47c4-a6e7-ae1f89169e6b\") " pod="openstack/neutron-db-create-8flnx" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.676950 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-2fbd-account-create-update-gv4s4"] Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.678225 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-2fbd-account-create-update-gv4s4" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.683707 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-db-secret" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.688746 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-2fbd-account-create-update-gv4s4"] Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.710471 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l62q6\" (UniqueName: \"kubernetes.io/projected/bbc7c682-a173-43e9-bca1-404c12b4b333-kube-api-access-l62q6\") pod \"neutron-ed0a-account-create-update-5rdjc\" (UID: \"bbc7c682-a173-43e9-bca1-404c12b4b333\") " pod="openstack/neutron-ed0a-account-create-update-5rdjc" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.714545 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfz82\" (UniqueName: \"kubernetes.io/projected/0329c472-ea03-47c4-a6e7-ae1f89169e6b-kube-api-access-vfz82\") pod \"neutron-db-create-8flnx\" (UID: \"0329c472-ea03-47c4-a6e7-ae1f89169e6b\") " pod="openstack/neutron-db-create-8flnx" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.737835 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-x247p" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.775806 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49ca39dc-75b9-48e7-a8c6-2b405d10a219-operator-scripts\") pod \"cloudkitty-2fbd-account-create-update-gv4s4\" (UID: \"49ca39dc-75b9-48e7-a8c6-2b405d10a219\") " pod="openstack/cloudkitty-2fbd-account-create-update-gv4s4" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.775946 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdc7b\" (UniqueName: \"kubernetes.io/projected/49ca39dc-75b9-48e7-a8c6-2b405d10a219-kube-api-access-bdc7b\") pod \"cloudkitty-2fbd-account-create-update-gv4s4\" (UID: \"49ca39dc-75b9-48e7-a8c6-2b405d10a219\") " pod="openstack/cloudkitty-2fbd-account-create-update-gv4s4" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.782159 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-ed0a-account-create-update-5rdjc" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.843801 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-8flnx" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.878017 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49ca39dc-75b9-48e7-a8c6-2b405d10a219-operator-scripts\") pod \"cloudkitty-2fbd-account-create-update-gv4s4\" (UID: \"49ca39dc-75b9-48e7-a8c6-2b405d10a219\") " pod="openstack/cloudkitty-2fbd-account-create-update-gv4s4" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.878141 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdc7b\" (UniqueName: \"kubernetes.io/projected/49ca39dc-75b9-48e7-a8c6-2b405d10a219-kube-api-access-bdc7b\") pod \"cloudkitty-2fbd-account-create-update-gv4s4\" (UID: \"49ca39dc-75b9-48e7-a8c6-2b405d10a219\") " pod="openstack/cloudkitty-2fbd-account-create-update-gv4s4" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.879235 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49ca39dc-75b9-48e7-a8c6-2b405d10a219-operator-scripts\") pod \"cloudkitty-2fbd-account-create-update-gv4s4\" (UID: \"49ca39dc-75b9-48e7-a8c6-2b405d10a219\") " pod="openstack/cloudkitty-2fbd-account-create-update-gv4s4" Feb 02 22:53:08 crc kubenswrapper[4755]: I0202 22:53:08.895820 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdc7b\" (UniqueName: \"kubernetes.io/projected/49ca39dc-75b9-48e7-a8c6-2b405d10a219-kube-api-access-bdc7b\") pod \"cloudkitty-2fbd-account-create-update-gv4s4\" (UID: \"49ca39dc-75b9-48e7-a8c6-2b405d10a219\") " pod="openstack/cloudkitty-2fbd-account-create-update-gv4s4" Feb 02 22:53:09 crc kubenswrapper[4755]: I0202 22:53:09.063084 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-2fbd-account-create-update-gv4s4" Feb 02 22:53:10 crc kubenswrapper[4755]: I0202 22:53:10.709250 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-jdd7m" Feb 02 22:53:13 crc kubenswrapper[4755]: I0202 22:53:13.145059 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/prometheus-metric-storage-0" podUID="dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" containerName="prometheus" probeResult="failure" output="Get \"http://10.217.0.114:9090/-/ready\": dial tcp 10.217.0.114:9090: connect: connection refused" Feb 02 22:53:13 crc kubenswrapper[4755]: I0202 22:53:13.370433 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="5717cdb5-a227-4975-b808-068f0ace63c5" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Feb 02 22:53:14 crc kubenswrapper[4755]: I0202 22:53:14.984821 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-etc-swift\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0" Feb 02 22:53:14 crc kubenswrapper[4755]: I0202 22:53:14.994686 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/50a165a2-aeeb-4f83-9af3-a33f76b34a39-etc-swift\") pod \"swift-storage-0\" (UID: \"50a165a2-aeeb-4f83-9af3-a33f76b34a39\") " pod="openstack/swift-storage-0" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.233377 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.475049 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-jdd7m-config-zqpxg" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.495306 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/968f2997-e427-4d75-9bce-ee87f49b381d-var-run\") pod \"968f2997-e427-4d75-9bce-ee87f49b381d\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.495363 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/968f2997-e427-4d75-9bce-ee87f49b381d-var-run-ovn\") pod \"968f2997-e427-4d75-9bce-ee87f49b381d\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.495527 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/968f2997-e427-4d75-9bce-ee87f49b381d-scripts\") pod \"968f2997-e427-4d75-9bce-ee87f49b381d\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.495554 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/968f2997-e427-4d75-9bce-ee87f49b381d-var-log-ovn\") pod \"968f2997-e427-4d75-9bce-ee87f49b381d\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.495579 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vgvc\" (UniqueName: \"kubernetes.io/projected/968f2997-e427-4d75-9bce-ee87f49b381d-kube-api-access-4vgvc\") pod \"968f2997-e427-4d75-9bce-ee87f49b381d\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.495744 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/968f2997-e427-4d75-9bce-ee87f49b381d-additional-scripts\") pod \"968f2997-e427-4d75-9bce-ee87f49b381d\" (UID: \"968f2997-e427-4d75-9bce-ee87f49b381d\") " Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.496625 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/968f2997-e427-4d75-9bce-ee87f49b381d-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "968f2997-e427-4d75-9bce-ee87f49b381d" (UID: "968f2997-e427-4d75-9bce-ee87f49b381d"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.496878 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/968f2997-e427-4d75-9bce-ee87f49b381d-var-run" (OuterVolumeSpecName: "var-run") pod "968f2997-e427-4d75-9bce-ee87f49b381d" (UID: "968f2997-e427-4d75-9bce-ee87f49b381d"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.496926 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/968f2997-e427-4d75-9bce-ee87f49b381d-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "968f2997-e427-4d75-9bce-ee87f49b381d" (UID: "968f2997-e427-4d75-9bce-ee87f49b381d"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.496987 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/968f2997-e427-4d75-9bce-ee87f49b381d-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "968f2997-e427-4d75-9bce-ee87f49b381d" (UID: "968f2997-e427-4d75-9bce-ee87f49b381d"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.497263 4755 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/968f2997-e427-4d75-9bce-ee87f49b381d-var-run\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.497285 4755 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/968f2997-e427-4d75-9bce-ee87f49b381d-var-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.497297 4755 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/968f2997-e427-4d75-9bce-ee87f49b381d-var-log-ovn\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.497307 4755 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/968f2997-e427-4d75-9bce-ee87f49b381d-additional-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.498414 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/968f2997-e427-4d75-9bce-ee87f49b381d-scripts" (OuterVolumeSpecName: "scripts") pod "968f2997-e427-4d75-9bce-ee87f49b381d" (UID: "968f2997-e427-4d75-9bce-ee87f49b381d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.508023 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/968f2997-e427-4d75-9bce-ee87f49b381d-kube-api-access-4vgvc" (OuterVolumeSpecName: "kube-api-access-4vgvc") pod "968f2997-e427-4d75-9bce-ee87f49b381d" (UID: "968f2997-e427-4d75-9bce-ee87f49b381d"). InnerVolumeSpecName "kube-api-access-4vgvc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.603146 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/968f2997-e427-4d75-9bce-ee87f49b381d-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.603173 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vgvc\" (UniqueName: \"kubernetes.io/projected/968f2997-e427-4d75-9bce-ee87f49b381d-kube-api-access-4vgvc\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.663151 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.703716 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-tls-assets\") pod \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.703899 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-thanos-prometheus-http-client-file\") pod \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.703926 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-web-config\") pod \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.703947 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tl8nt\" (UniqueName: \"kubernetes.io/projected/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-kube-api-access-tl8nt\") pod \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.704005 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-prometheus-metric-storage-rulefiles-2\") pod \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.704030 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-prometheus-metric-storage-rulefiles-0\") pod \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.704080 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-prometheus-metric-storage-rulefiles-1\") pod \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.704117 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-config\") pod \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.704217 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68\") pod \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.704254 4755 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-config-out\") pod \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\" (UID: \"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc\") " Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.705530 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-prometheus-metric-storage-rulefiles-2" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-2") pod "dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" (UID: "dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-2". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.706595 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-prometheus-metric-storage-rulefiles-1" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-1") pod "dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" (UID: "dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.707385 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" (UID: "dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.712048 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" (UID: "dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.713369 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" (UID: "dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.713565 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-config" (OuterVolumeSpecName: "config") pod "dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" (UID: "dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.713933 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-kube-api-access-tl8nt" (OuterVolumeSpecName: "kube-api-access-tl8nt") pod "dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" (UID: "dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc"). InnerVolumeSpecName "kube-api-access-tl8nt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.735022 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-config-out" (OuterVolumeSpecName: "config-out") pod "dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" (UID: "dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.758681 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" (UID: "dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc"). InnerVolumeSpecName "pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68". PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.789532 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-web-config" (OuterVolumeSpecName: "web-config") pod "dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" (UID: "dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.807717 4755 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.807760 4755 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-web-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.807773 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tl8nt\" (UniqueName: \"kubernetes.io/projected/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-kube-api-access-tl8nt\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.807783 4755 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-prometheus-metric-storage-rulefiles-2\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.807794 4755 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.807804 4755 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-prometheus-metric-storage-rulefiles-1\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.807813 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.807842 4755 reconciler_common.go:286] 
"operationExecutor.UnmountDevice started for volume \"pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68\") on node \"crc\" " Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.807856 4755 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-config-out\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.807865 4755 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc-tls-assets\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.832433 4755 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.832567 4755 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68") on node "crc" Feb 02 22:53:15 crc kubenswrapper[4755]: I0202 22:53:15.909134 4755 reconciler_common.go:293] "Volume detached for volume \"pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.025121 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-8750-account-create-update-hh7qm"] Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.041127 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-ed0a-account-create-update-5rdjc"] Feb 02 22:53:16 crc kubenswrapper[4755]: W0202 22:53:16.082904 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e05b09d_5b2a_4b6a_8ff9_dbbe19018706.slice/crio-edea3ab2c6dff6157bf702fbb8bf33df88b635610ba5ab92866dd4bc789cfe91 WatchSource:0}: Error finding container edea3ab2c6dff6157bf702fbb8bf33df88b635610ba5ab92866dd4bc789cfe91: Status 404 returned error can't find the container with id edea3ab2c6dff6157bf702fbb8bf33df88b635610ba5ab92866dd4bc789cfe91 Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.314319 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-6a80-account-create-update-hngf6"] Feb 02 22:53:16 crc kubenswrapper[4755]: W0202 22:53:16.317163 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod222237ea_cd02_4919_b29c_9770862a1d51.slice/crio-6e74d102151fba0c1f6fe925faeddceef8ef2d2d576327b359ef24b84d9064a9 WatchSource:0}: Error finding container 6e74d102151fba0c1f6fe925faeddceef8ef2d2d576327b359ef24b84d9064a9: Status 404 returned error can't find the container with id 6e74d102151fba0c1f6fe925faeddceef8ef2d2d576327b359ef24b84d9064a9 Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.322627 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-8j88c"] Feb 02 22:53:16 crc kubenswrapper[4755]: W0202 22:53:16.326327 4755 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71e11ad7_2fb2_4059_997a_107df77d50aa.slice/crio-6bf8aa48de4c2a4f02831ea3a6e65c9100383b316bec7dfc0015efb67f96698a WatchSource:0}: Error finding container 6bf8aa48de4c2a4f02831ea3a6e65c9100383b316bec7dfc0015efb67f96698a: Status 404 returned error can't find the container with id 6bf8aa48de4c2a4f02831ea3a6e65c9100383b316bec7dfc0015efb67f96698a Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.340740 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-ftxm7"] Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.353587 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-x247p"] Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.360715 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-x247p" event={"ID":"47165bb1-af4a-4e73-957e-8f0845a29841","Type":"ContainerStarted","Data":"383720aa356499797f5f6966bf3fca2f49f8124c0e31fef0241b913e2d121844"} Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.375300 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-ftxm7" event={"ID":"71e11ad7-2fb2-4059-997a-107df77d50aa","Type":"ContainerStarted","Data":"6bf8aa48de4c2a4f02831ea3a6e65c9100383b316bec7dfc0015efb67f96698a"} Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.377688 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6a80-account-create-update-hngf6" event={"ID":"82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd","Type":"ContainerStarted","Data":"015b428006324f9d2983dcccd703f6124bb5de9767a6faf2fa8bb49c264a4516"} Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.383255 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ed0a-account-create-update-5rdjc" event={"ID":"bbc7c682-a173-43e9-bca1-404c12b4b333","Type":"ContainerStarted","Data":"e320b160a226c03c34cee7f4e7ff6b4857443e7493fb0041e5d6ae529964e5ad"} Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.392514 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc","Type":"ContainerDied","Data":"eb055b349b8efb98665c41f6e33ad0828915b0237e824775cf8f80781fb851a8"} Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.392568 4755 scope.go:117] "RemoveContainer" containerID="2dba2f60b197315b9c9aeecc54c2dfba32b69489d7b7279575c7b88354eafc52" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.392549 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.397069 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.400508 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jdd7m-config-zqpxg" event={"ID":"968f2997-e427-4d75-9bce-ee87f49b381d","Type":"ContainerDied","Data":"03b253185bbdeae6d6087631a854282cd32f85c45c96f724cc9614caf23e193a"} Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.400539 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="03b253185bbdeae6d6087631a854282cd32f85c45c96f724cc9614caf23e193a" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.400598 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-jdd7m-config-zqpxg" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.408523 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-8j88c" event={"ID":"222237ea-cd02-4919-b29c-9770862a1d51","Type":"ContainerStarted","Data":"6e74d102151fba0c1f6fe925faeddceef8ef2d2d576327b359ef24b84d9064a9"} Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.415959 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-8750-account-create-update-hh7qm" event={"ID":"7e05b09d-5b2a-4b6a-8ff9-dbbe19018706","Type":"ContainerStarted","Data":"edea3ab2c6dff6157bf702fbb8bf33df88b635610ba5ab92866dd4bc789cfe91"} Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.440427 4755 scope.go:117] "RemoveContainer" containerID="4184e09c3258912403f80e83b518732aa855d8b2b2381ba02eb5a280b06c9b83" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.445963 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.453526 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"] Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.512052 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Feb 02 22:53:16 crc kubenswrapper[4755]: E0202 22:53:16.512438 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" containerName="config-reloader" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.512453 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" containerName="config-reloader" Feb 02 22:53:16 crc kubenswrapper[4755]: E0202 22:53:16.512473 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" containerName="prometheus" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.512481 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" containerName="prometheus" Feb 02 22:53:16 crc kubenswrapper[4755]: E0202 22:53:16.512491 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" containerName="init-config-reloader" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.512498 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" containerName="init-config-reloader" Feb 02 22:53:16 crc kubenswrapper[4755]: E0202 22:53:16.512512 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" containerName="thanos-sidecar" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.512518 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" containerName="thanos-sidecar" Feb 02 22:53:16 crc kubenswrapper[4755]: E0202 22:53:16.512540 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="968f2997-e427-4d75-9bce-ee87f49b381d" containerName="ovn-config" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.512549 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="968f2997-e427-4d75-9bce-ee87f49b381d" containerName="ovn-config" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.512780 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" containerName="config-reloader" 
Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.512795 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" containerName="prometheus" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.512810 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="968f2997-e427-4d75-9bce-ee87f49b381d" containerName="ovn-config" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.512821 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" containerName="thanos-sidecar" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.519192 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.523037 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-8flnx"] Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.529423 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.529602 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-2" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.529707 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.529861 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-cwccx" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.529932 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.529956 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-1" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.530050 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.530405 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.538221 4755 scope.go:117] "RemoveContainer" containerID="e0dccd2ede44321dc0c2f4d658b5bed9355bae202ba13b3d4afc97e82c8d08fa" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.539477 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-2fbd-account-create-update-gv4s4"] Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.544267 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Feb 02 22:53:16 crc kubenswrapper[4755]: W0202 22:53:16.545199 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0329c472_ea03_47c4_a6e7_ae1f89169e6b.slice/crio-d6bd5ab2420a97988468320b5e9b8eef9b09fef019e5114a1413e41c82a0cc84 WatchSource:0}: Error finding container d6bd5ab2420a97988468320b5e9b8eef9b09fef019e5114a1413e41c82a0cc84: Status 404 returned error can't find the container with id d6bd5ab2420a97988468320b5e9b8eef9b09fef019e5114a1413e41c82a0cc84 Feb 02 22:53:16 crc kubenswrapper[4755]: W0202 
22:53:16.545433 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49ca39dc_75b9_48e7_a8c6_2b405d10a219.slice/crio-39f8956eccafb4ff9120842fbf3706db9d4d04ff0fa5f137e616ac792995f525 WatchSource:0}: Error finding container 39f8956eccafb4ff9120842fbf3706db9d4d04ff0fa5f137e616ac792995f525: Status 404 returned error can't find the container with id 39f8956eccafb4ff9120842fbf3706db9d4d04ff0fa5f137e616ac792995f525 Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.548506 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.584309 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-create-d4c24"] Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.651370 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-l5tks"] Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.736184 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56e97e05-60e4-4c71-b081-18bb5dde670b-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.736247 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/56e97e05-60e4-4c71-b081-18bb5dde670b-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.736273 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/56e97e05-60e4-4c71-b081-18bb5dde670b-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.736310 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.736339 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/56e97e05-60e4-4c71-b081-18bb5dde670b-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.736369 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/56e97e05-60e4-4c71-b081-18bb5dde670b-config\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 
02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.736391 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/56e97e05-60e4-4c71-b081-18bb5dde670b-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.736413 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/56e97e05-60e4-4c71-b081-18bb5dde670b-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.736445 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/56e97e05-60e4-4c71-b081-18bb5dde670b-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.736473 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/56e97e05-60e4-4c71-b081-18bb5dde670b-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.736503 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/56e97e05-60e4-4c71-b081-18bb5dde670b-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.736531 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/56e97e05-60e4-4c71-b081-18bb5dde670b-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.736582 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6znz\" (UniqueName: \"kubernetes.io/projected/56e97e05-60e4-4c71-b081-18bb5dde670b-kube-api-access-g6znz\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.742929 4755 scope.go:117] "RemoveContainer" containerID="e115599dcb652a384d5f72adc533a042b0ef4ae64e3fc1015e64d204f93e6195" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.789410 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-jdd7m-config-zqpxg"] Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.818906 4755 kubelet.go:2431] "SyncLoop 
REMOVE" source="api" pods=["openstack/ovn-controller-jdd7m-config-zqpxg"] Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.826694 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-jdd7m-config-8628s"] Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.827821 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jdd7m-config-8628s" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.829534 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.835853 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jdd7m-config-8628s"] Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.837697 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/56e97e05-60e4-4c71-b081-18bb5dde670b-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.837815 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/56e97e05-60e4-4c71-b081-18bb5dde670b-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.837851 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/56e97e05-60e4-4c71-b081-18bb5dde670b-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.837882 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/56e97e05-60e4-4c71-b081-18bb5dde670b-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.837910 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/56e97e05-60e4-4c71-b081-18bb5dde670b-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.843471 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6znz\" (UniqueName: \"kubernetes.io/projected/56e97e05-60e4-4c71-b081-18bb5dde670b-kube-api-access-g6znz\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.843747 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/56e97e05-60e4-4c71-b081-18bb5dde670b-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.843837 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/56e97e05-60e4-4c71-b081-18bb5dde670b-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.843865 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/56e97e05-60e4-4c71-b081-18bb5dde670b-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.845197 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.845247 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/56e97e05-60e4-4c71-b081-18bb5dde670b-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.845291 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/56e97e05-60e4-4c71-b081-18bb5dde670b-config\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.845312 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/56e97e05-60e4-4c71-b081-18bb5dde670b-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.845581 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/56e97e05-60e4-4c71-b081-18bb5dde670b-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.859127 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/56e97e05-60e4-4c71-b081-18bb5dde670b-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.861384 4755 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/56e97e05-60e4-4c71-b081-18bb5dde670b-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.867341 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/56e97e05-60e4-4c71-b081-18bb5dde670b-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.867705 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/56e97e05-60e4-4c71-b081-18bb5dde670b-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.867788 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/56e97e05-60e4-4c71-b081-18bb5dde670b-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.867807 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/56e97e05-60e4-4c71-b081-18bb5dde670b-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.869403 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/56e97e05-60e4-4c71-b081-18bb5dde670b-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.869430 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6znz\" (UniqueName: \"kubernetes.io/projected/56e97e05-60e4-4c71-b081-18bb5dde670b-kube-api-access-g6znz\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.869479 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/56e97e05-60e4-4c71-b081-18bb5dde670b-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.870291 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/56e97e05-60e4-4c71-b081-18bb5dde670b-config\") pod \"prometheus-metric-storage-0\" (UID: 
\"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.880515 4755 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.880567 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f9096daf592eed82232c3beba2134ba1e13b31cbf7a7e87be279794dff9df4fb/globalmount\"" pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.880514 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56e97e05-60e4-4c71-b081-18bb5dde670b-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.949604 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4vfj\" (UniqueName: \"kubernetes.io/projected/ed94a520-b8d1-4f49-97a4-12f97b6c814d-kube-api-access-h4vfj\") pod \"ovn-controller-jdd7m-config-8628s\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " pod="openstack/ovn-controller-jdd7m-config-8628s" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.949904 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ed94a520-b8d1-4f49-97a4-12f97b6c814d-var-run-ovn\") pod \"ovn-controller-jdd7m-config-8628s\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " pod="openstack/ovn-controller-jdd7m-config-8628s" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.949942 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ed94a520-b8d1-4f49-97a4-12f97b6c814d-var-run\") pod \"ovn-controller-jdd7m-config-8628s\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " pod="openstack/ovn-controller-jdd7m-config-8628s" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.949965 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ed94a520-b8d1-4f49-97a4-12f97b6c814d-additional-scripts\") pod \"ovn-controller-jdd7m-config-8628s\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " pod="openstack/ovn-controller-jdd7m-config-8628s" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.950027 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ed94a520-b8d1-4f49-97a4-12f97b6c814d-var-log-ovn\") pod \"ovn-controller-jdd7m-config-8628s\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " pod="openstack/ovn-controller-jdd7m-config-8628s" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.950065 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/ed94a520-b8d1-4f49-97a4-12f97b6c814d-scripts\") pod \"ovn-controller-jdd7m-config-8628s\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " pod="openstack/ovn-controller-jdd7m-config-8628s" Feb 02 22:53:16 crc kubenswrapper[4755]: I0202 22:53:16.972426 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b3fbe30b-561b-44be-a7df-d453cdbeac68\") pod \"prometheus-metric-storage-0\" (UID: \"56e97e05-60e4-4c71-b081-18bb5dde670b\") " pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.051286 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ed94a520-b8d1-4f49-97a4-12f97b6c814d-var-log-ovn\") pod \"ovn-controller-jdd7m-config-8628s\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " pod="openstack/ovn-controller-jdd7m-config-8628s" Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.051403 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ed94a520-b8d1-4f49-97a4-12f97b6c814d-scripts\") pod \"ovn-controller-jdd7m-config-8628s\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " pod="openstack/ovn-controller-jdd7m-config-8628s" Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.051482 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4vfj\" (UniqueName: \"kubernetes.io/projected/ed94a520-b8d1-4f49-97a4-12f97b6c814d-kube-api-access-h4vfj\") pod \"ovn-controller-jdd7m-config-8628s\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " pod="openstack/ovn-controller-jdd7m-config-8628s" Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.051528 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ed94a520-b8d1-4f49-97a4-12f97b6c814d-var-run-ovn\") pod \"ovn-controller-jdd7m-config-8628s\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " pod="openstack/ovn-controller-jdd7m-config-8628s" Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.051550 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ed94a520-b8d1-4f49-97a4-12f97b6c814d-var-run\") pod \"ovn-controller-jdd7m-config-8628s\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " pod="openstack/ovn-controller-jdd7m-config-8628s" Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.051569 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ed94a520-b8d1-4f49-97a4-12f97b6c814d-additional-scripts\") pod \"ovn-controller-jdd7m-config-8628s\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " pod="openstack/ovn-controller-jdd7m-config-8628s" Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.052190 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ed94a520-b8d1-4f49-97a4-12f97b6c814d-additional-scripts\") pod \"ovn-controller-jdd7m-config-8628s\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " pod="openstack/ovn-controller-jdd7m-config-8628s" Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.052320 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ed94a520-b8d1-4f49-97a4-12f97b6c814d-var-run-ovn\") pod \"ovn-controller-jdd7m-config-8628s\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " pod="openstack/ovn-controller-jdd7m-config-8628s" Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.052355 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ed94a520-b8d1-4f49-97a4-12f97b6c814d-var-run\") pod \"ovn-controller-jdd7m-config-8628s\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " pod="openstack/ovn-controller-jdd7m-config-8628s" Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.052392 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ed94a520-b8d1-4f49-97a4-12f97b6c814d-var-log-ovn\") pod \"ovn-controller-jdd7m-config-8628s\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " pod="openstack/ovn-controller-jdd7m-config-8628s" Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.054668 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ed94a520-b8d1-4f49-97a4-12f97b6c814d-scripts\") pod \"ovn-controller-jdd7m-config-8628s\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " pod="openstack/ovn-controller-jdd7m-config-8628s" Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.072495 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4vfj\" (UniqueName: \"kubernetes.io/projected/ed94a520-b8d1-4f49-97a4-12f97b6c814d-kube-api-access-h4vfj\") pod \"ovn-controller-jdd7m-config-8628s\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " pod="openstack/ovn-controller-jdd7m-config-8628s" Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.089851 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="968f2997-e427-4d75-9bce-ee87f49b381d" path="/var/lib/kubelet/pods/968f2997-e427-4d75-9bce-ee87f49b381d/volumes" Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.090613 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc" path="/var/lib/kubelet/pods/dd0d6aec-8a89-42b1-a7e3-56c3a08e64cc/volumes" Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.187043 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jdd7m-config-8628s" Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.221008 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.436139 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-2fbd-account-create-update-gv4s4" event={"ID":"49ca39dc-75b9-48e7-a8c6-2b405d10a219","Type":"ContainerStarted","Data":"d092ca52d36f228841cd93f4785f52915e51a33c8cd3f9895ea67c0cf3520843"} Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.436599 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-2fbd-account-create-update-gv4s4" event={"ID":"49ca39dc-75b9-48e7-a8c6-2b405d10a219","Type":"ContainerStarted","Data":"39f8956eccafb4ff9120842fbf3706db9d4d04ff0fa5f137e616ac792995f525"} Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.445514 4755 generic.go:334] "Generic (PLEG): container finished" podID="bbc7c682-a173-43e9-bca1-404c12b4b333" containerID="3d16d5af66ed73c47119bff4be610c1f40ad459c5adc302a3f2e9e788bc47598" exitCode=0 Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.445655 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ed0a-account-create-update-5rdjc" event={"ID":"bbc7c682-a173-43e9-bca1-404c12b4b333","Type":"ContainerDied","Data":"3d16d5af66ed73c47119bff4be610c1f40ad459c5adc302a3f2e9e788bc47598"} Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.450150 4755 generic.go:334] "Generic (PLEG): container finished" podID="7e05b09d-5b2a-4b6a-8ff9-dbbe19018706" containerID="94f0599d9e1c444580ae409efc65d62f2f4e4e60c54210f2d95cd84bd7eec41b" exitCode=0 Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.450206 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-8750-account-create-update-hh7qm" event={"ID":"7e05b09d-5b2a-4b6a-8ff9-dbbe19018706","Type":"ContainerDied","Data":"94f0599d9e1c444580ae409efc65d62f2f4e4e60c54210f2d95cd84bd7eec41b"} Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.453097 4755 generic.go:334] "Generic (PLEG): container finished" podID="0591104a-5134-4402-aa0f-4b94a79aa5df" containerID="7b189a7fb96caf3325c5394a26afe2bf80760acdd0bff5a3f2a7ad16a5e23074" exitCode=0 Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.453138 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-d4c24" event={"ID":"0591104a-5134-4402-aa0f-4b94a79aa5df","Type":"ContainerDied","Data":"7b189a7fb96caf3325c5394a26afe2bf80760acdd0bff5a3f2a7ad16a5e23074"} Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.453191 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-d4c24" event={"ID":"0591104a-5134-4402-aa0f-4b94a79aa5df","Type":"ContainerStarted","Data":"08cafa46816df85759d7b2ea545995e763b3307134fb4d07d0c696f5fb4a5b86"} Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.454924 4755 generic.go:334] "Generic (PLEG): container finished" podID="0329c472-ea03-47c4-a6e7-ae1f89169e6b" containerID="cbe5dd77be314a6e9d29b7d1de91006f88ae3f2b97d528bda6e8ea07549aaaa9" exitCode=0 Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.455013 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-8flnx" event={"ID":"0329c472-ea03-47c4-a6e7-ae1f89169e6b","Type":"ContainerDied","Data":"cbe5dd77be314a6e9d29b7d1de91006f88ae3f2b97d528bda6e8ea07549aaaa9"} Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.455042 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-8flnx" 
event={"ID":"0329c472-ea03-47c4-a6e7-ae1f89169e6b","Type":"ContainerStarted","Data":"d6bd5ab2420a97988468320b5e9b8eef9b09fef019e5114a1413e41c82a0cc84"} Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.462412 4755 generic.go:334] "Generic (PLEG): container finished" podID="222237ea-cd02-4919-b29c-9770862a1d51" containerID="f8d6ac97ab0fb1c0cd200baf81a8b1624050135da7f88fdfe94d99fc391d9a19" exitCode=0 Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.462512 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-8j88c" event={"ID":"222237ea-cd02-4919-b29c-9770862a1d51","Type":"ContainerDied","Data":"f8d6ac97ab0fb1c0cd200baf81a8b1624050135da7f88fdfe94d99fc391d9a19"} Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.464685 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-fdxn5" event={"ID":"02b6b0aa-4f58-4de4-83b7-c3291e005325","Type":"ContainerStarted","Data":"2e512cc897c06d8cdd94d0c3021b290bde7f9250a0d58d86a9b882dc4178d456"} Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.466613 4755 generic.go:334] "Generic (PLEG): container finished" podID="71e11ad7-2fb2-4059-997a-107df77d50aa" containerID="799487a328697fc12fc93257bc5606057c3a99dea78b6ba2ea7403f33c04099c" exitCode=0 Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.466775 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-ftxm7" event={"ID":"71e11ad7-2fb2-4059-997a-107df77d50aa","Type":"ContainerDied","Data":"799487a328697fc12fc93257bc5606057c3a99dea78b6ba2ea7403f33c04099c"} Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.468435 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"50a165a2-aeeb-4f83-9af3-a33f76b34a39","Type":"ContainerStarted","Data":"95f4aa1a621d2eb3d9cfa97860dab6a2ca82e7ac619adaa3c5f63ac29058f529"} Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.469829 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-l5tks" event={"ID":"76950441-48ad-4562-bbf6-15fd01c6d2d4","Type":"ContainerStarted","Data":"7aabbf2e1f23dd6de8aa371f3ec6a15a6ef91c01c78a3530b65aac3e04fb72d7"} Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.469862 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-l5tks" event={"ID":"76950441-48ad-4562-bbf6-15fd01c6d2d4","Type":"ContainerStarted","Data":"1076bd61d0974ddfe6c639a38e1c3981973232ab6806e02191719987eaed5596"} Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.471007 4755 generic.go:334] "Generic (PLEG): container finished" podID="82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd" containerID="04973ba3e744473c95923eba257bd6d357aa2bdaf2b0ed653785bddc20928783" exitCode=0 Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.471054 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6a80-account-create-update-hngf6" event={"ID":"82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd","Type":"ContainerDied","Data":"04973ba3e744473c95923eba257bd6d357aa2bdaf2b0ed653785bddc20928783"} Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.518407 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-fdxn5" podStartSLOduration=3.768166225 podStartE2EDuration="17.518386339s" podCreationTimestamp="2026-02-02 22:53:00 +0000 UTC" firstStartedPulling="2026-02-02 22:53:01.750460492 +0000 UTC m=+1137.441680818" lastFinishedPulling="2026-02-02 22:53:15.500680596 +0000 UTC 
m=+1151.191900932" observedRunningTime="2026-02-02 22:53:17.510776048 +0000 UTC m=+1153.201996374" watchObservedRunningTime="2026-02-02 22:53:17.518386339 +0000 UTC m=+1153.209606665" Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.700119 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jdd7m-config-8628s"] Feb 02 22:53:17 crc kubenswrapper[4755]: I0202 22:53:17.848696 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Feb 02 22:53:17 crc kubenswrapper[4755]: W0202 22:53:17.973967 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56e97e05_60e4_4c71_b081_18bb5dde670b.slice/crio-f9c1e68c1b568a8130af1e5eb125ec81b1862e7bba34c148275a7334a3adc707 WatchSource:0}: Error finding container f9c1e68c1b568a8130af1e5eb125ec81b1862e7bba34c148275a7334a3adc707: Status 404 returned error can't find the container with id f9c1e68c1b568a8130af1e5eb125ec81b1862e7bba34c148275a7334a3adc707 Feb 02 22:53:18 crc kubenswrapper[4755]: I0202 22:53:18.481507 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"50a165a2-aeeb-4f83-9af3-a33f76b34a39","Type":"ContainerStarted","Data":"8048b0f9cadeee44b95967d91224673d3a6d0bc050fe0c0ff0e9c2647d0eea94"} Feb 02 22:53:18 crc kubenswrapper[4755]: I0202 22:53:18.488201 4755 generic.go:334] "Generic (PLEG): container finished" podID="76950441-48ad-4562-bbf6-15fd01c6d2d4" containerID="7aabbf2e1f23dd6de8aa371f3ec6a15a6ef91c01c78a3530b65aac3e04fb72d7" exitCode=0 Feb 02 22:53:18 crc kubenswrapper[4755]: I0202 22:53:18.488303 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-l5tks" event={"ID":"76950441-48ad-4562-bbf6-15fd01c6d2d4","Type":"ContainerDied","Data":"7aabbf2e1f23dd6de8aa371f3ec6a15a6ef91c01c78a3530b65aac3e04fb72d7"} Feb 02 22:53:18 crc kubenswrapper[4755]: I0202 22:53:18.490480 4755 generic.go:334] "Generic (PLEG): container finished" podID="49ca39dc-75b9-48e7-a8c6-2b405d10a219" containerID="d092ca52d36f228841cd93f4785f52915e51a33c8cd3f9895ea67c0cf3520843" exitCode=0 Feb 02 22:53:18 crc kubenswrapper[4755]: I0202 22:53:18.490521 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-2fbd-account-create-update-gv4s4" event={"ID":"49ca39dc-75b9-48e7-a8c6-2b405d10a219","Type":"ContainerDied","Data":"d092ca52d36f228841cd93f4785f52915e51a33c8cd3f9895ea67c0cf3520843"} Feb 02 22:53:18 crc kubenswrapper[4755]: I0202 22:53:18.492244 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jdd7m-config-8628s" event={"ID":"ed94a520-b8d1-4f49-97a4-12f97b6c814d","Type":"ContainerStarted","Data":"5a3c83c20a93b7f8d77a81949e4e79b7a9af74806ed404a68379953d349e6b8b"} Feb 02 22:53:18 crc kubenswrapper[4755]: I0202 22:53:18.492272 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jdd7m-config-8628s" event={"ID":"ed94a520-b8d1-4f49-97a4-12f97b6c814d","Type":"ContainerStarted","Data":"8d66cb69c841b51160ee04b5a18abbd6e2833940ea6e03f2441ad1e2d9241b93"} Feb 02 22:53:18 crc kubenswrapper[4755]: I0202 22:53:18.494309 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"56e97e05-60e4-4c71-b081-18bb5dde670b","Type":"ContainerStarted","Data":"f9c1e68c1b568a8130af1e5eb125ec81b1862e7bba34c148275a7334a3adc707"} Feb 02 22:53:18 crc kubenswrapper[4755]: I0202 22:53:18.516176 4755 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-jdd7m-config-8628s" podStartSLOduration=2.516157127 podStartE2EDuration="2.516157127s" podCreationTimestamp="2026-02-02 22:53:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:53:18.514031028 +0000 UTC m=+1154.205251364" watchObservedRunningTime="2026-02-02 22:53:18.516157127 +0000 UTC m=+1154.207377453" Feb 02 22:53:19 crc kubenswrapper[4755]: I0202 22:53:19.504593 4755 generic.go:334] "Generic (PLEG): container finished" podID="ed94a520-b8d1-4f49-97a4-12f97b6c814d" containerID="5a3c83c20a93b7f8d77a81949e4e79b7a9af74806ed404a68379953d349e6b8b" exitCode=0 Feb 02 22:53:19 crc kubenswrapper[4755]: I0202 22:53:19.504662 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jdd7m-config-8628s" event={"ID":"ed94a520-b8d1-4f49-97a4-12f97b6c814d","Type":"ContainerDied","Data":"5a3c83c20a93b7f8d77a81949e4e79b7a9af74806ed404a68379953d349e6b8b"} Feb 02 22:53:19 crc kubenswrapper[4755]: I0202 22:53:19.507508 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"50a165a2-aeeb-4f83-9af3-a33f76b34a39","Type":"ContainerStarted","Data":"5be4ffd64cf8590f50c94852a6363144c1bd91434678693beba7686da39e3015"} Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.485704 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-8750-account-create-update-hh7qm" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.525472 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6a80-account-create-update-hngf6" event={"ID":"82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd","Type":"ContainerDied","Data":"015b428006324f9d2983dcccd703f6124bb5de9767a6faf2fa8bb49c264a4516"} Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.525510 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="015b428006324f9d2983dcccd703f6124bb5de9767a6faf2fa8bb49c264a4516" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.527452 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-8flnx" event={"ID":"0329c472-ea03-47c4-a6e7-ae1f89169e6b","Type":"ContainerDied","Data":"d6bd5ab2420a97988468320b5e9b8eef9b09fef019e5114a1413e41c82a0cc84"} Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.527473 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6bd5ab2420a97988468320b5e9b8eef9b09fef019e5114a1413e41c82a0cc84" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.529195 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-2fbd-account-create-update-gv4s4" event={"ID":"49ca39dc-75b9-48e7-a8c6-2b405d10a219","Type":"ContainerDied","Data":"39f8956eccafb4ff9120842fbf3706db9d4d04ff0fa5f137e616ac792995f525"} Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.529215 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="39f8956eccafb4ff9120842fbf3706db9d4d04ff0fa5f137e616ac792995f525" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.530956 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-8j88c" event={"ID":"222237ea-cd02-4919-b29c-9770862a1d51","Type":"ContainerDied","Data":"6e74d102151fba0c1f6fe925faeddceef8ef2d2d576327b359ef24b84d9064a9"} Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 
22:53:21.530990 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6e74d102151fba0c1f6fe925faeddceef8ef2d2d576327b359ef24b84d9064a9" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.532822 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-8750-account-create-update-hh7qm" event={"ID":"7e05b09d-5b2a-4b6a-8ff9-dbbe19018706","Type":"ContainerDied","Data":"edea3ab2c6dff6157bf702fbb8bf33df88b635610ba5ab92866dd4bc789cfe91"} Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.532843 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="edea3ab2c6dff6157bf702fbb8bf33df88b635610ba5ab92866dd4bc789cfe91" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.532893 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-8750-account-create-update-hh7qm" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.534850 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-d4c24" event={"ID":"0591104a-5134-4402-aa0f-4b94a79aa5df","Type":"ContainerDied","Data":"08cafa46816df85759d7b2ea545995e763b3307134fb4d07d0c696f5fb4a5b86"} Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.534872 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="08cafa46816df85759d7b2ea545995e763b3307134fb4d07d0c696f5fb4a5b86" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.536609 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-ftxm7" event={"ID":"71e11ad7-2fb2-4059-997a-107df77d50aa","Type":"ContainerDied","Data":"6bf8aa48de4c2a4f02831ea3a6e65c9100383b316bec7dfc0015efb67f96698a"} Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.536629 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6bf8aa48de4c2a4f02831ea3a6e65c9100383b316bec7dfc0015efb67f96698a" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.538467 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-l5tks" event={"ID":"76950441-48ad-4562-bbf6-15fd01c6d2d4","Type":"ContainerDied","Data":"1076bd61d0974ddfe6c639a38e1c3981973232ab6806e02191719987eaed5596"} Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.538487 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1076bd61d0974ddfe6c639a38e1c3981973232ab6806e02191719987eaed5596" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.540212 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ed0a-account-create-update-5rdjc" event={"ID":"bbc7c682-a173-43e9-bca1-404c12b4b333","Type":"ContainerDied","Data":"e320b160a226c03c34cee7f4e7ff6b4857443e7493fb0041e5d6ae529964e5ad"} Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.540232 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e320b160a226c03c34cee7f4e7ff6b4857443e7493fb0041e5d6ae529964e5ad" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.542320 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jdd7m-config-8628s" event={"ID":"ed94a520-b8d1-4f49-97a4-12f97b6c814d","Type":"ContainerDied","Data":"8d66cb69c841b51160ee04b5a18abbd6e2833940ea6e03f2441ad1e2d9241b93"} Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.542343 4755 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="8d66cb69c841b51160ee04b5a18abbd6e2833940ea6e03f2441ad1e2d9241b93" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.544182 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"56e97e05-60e4-4c71-b081-18bb5dde670b","Type":"ContainerStarted","Data":"18a426a34d3ccdc567f84e25ebfd124daab172df4d3fa4b9213c7bc5b2a4a44f"} Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.555066 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-8flnx" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.604228 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-ed0a-account-create-update-5rdjc" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.618174 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jdd7m-config-8628s" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.631255 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-ftxm7" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.654140 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-l5tks" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.654366 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vfz82\" (UniqueName: \"kubernetes.io/projected/0329c472-ea03-47c4-a6e7-ae1f89169e6b-kube-api-access-vfz82\") pod \"0329c472-ea03-47c4-a6e7-ae1f89169e6b\" (UID: \"0329c472-ea03-47c4-a6e7-ae1f89169e6b\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.654547 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0329c472-ea03-47c4-a6e7-ae1f89169e6b-operator-scripts\") pod \"0329c472-ea03-47c4-a6e7-ae1f89169e6b\" (UID: \"0329c472-ea03-47c4-a6e7-ae1f89169e6b\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.654601 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rl46f\" (UniqueName: \"kubernetes.io/projected/7e05b09d-5b2a-4b6a-8ff9-dbbe19018706-kube-api-access-rl46f\") pod \"7e05b09d-5b2a-4b6a-8ff9-dbbe19018706\" (UID: \"7e05b09d-5b2a-4b6a-8ff9-dbbe19018706\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.654669 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e05b09d-5b2a-4b6a-8ff9-dbbe19018706-operator-scripts\") pod \"7e05b09d-5b2a-4b6a-8ff9-dbbe19018706\" (UID: \"7e05b09d-5b2a-4b6a-8ff9-dbbe19018706\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.658069 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e05b09d-5b2a-4b6a-8ff9-dbbe19018706-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7e05b09d-5b2a-4b6a-8ff9-dbbe19018706" (UID: "7e05b09d-5b2a-4b6a-8ff9-dbbe19018706"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.659017 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0329c472-ea03-47c4-a6e7-ae1f89169e6b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0329c472-ea03-47c4-a6e7-ae1f89169e6b" (UID: "0329c472-ea03-47c4-a6e7-ae1f89169e6b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.661196 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0329c472-ea03-47c4-a6e7-ae1f89169e6b-kube-api-access-vfz82" (OuterVolumeSpecName: "kube-api-access-vfz82") pod "0329c472-ea03-47c4-a6e7-ae1f89169e6b" (UID: "0329c472-ea03-47c4-a6e7-ae1f89169e6b"). InnerVolumeSpecName "kube-api-access-vfz82". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.661664 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e05b09d-5b2a-4b6a-8ff9-dbbe19018706-kube-api-access-rl46f" (OuterVolumeSpecName: "kube-api-access-rl46f") pod "7e05b09d-5b2a-4b6a-8ff9-dbbe19018706" (UID: "7e05b09d-5b2a-4b6a-8ff9-dbbe19018706"). InnerVolumeSpecName "kube-api-access-rl46f". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.667525 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-2fbd-account-create-update-gv4s4" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.724879 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-8j88c" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.736183 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-d4c24" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.751515 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-6a80-account-create-update-hngf6" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.756407 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ed94a520-b8d1-4f49-97a4-12f97b6c814d-var-log-ovn\") pod \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.756551 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4vfj\" (UniqueName: \"kubernetes.io/projected/ed94a520-b8d1-4f49-97a4-12f97b6c814d-kube-api-access-h4vfj\") pod \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.756585 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l62q6\" (UniqueName: \"kubernetes.io/projected/bbc7c682-a173-43e9-bca1-404c12b4b333-kube-api-access-l62q6\") pod \"bbc7c682-a173-43e9-bca1-404c12b4b333\" (UID: \"bbc7c682-a173-43e9-bca1-404c12b4b333\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.756631 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76950441-48ad-4562-bbf6-15fd01c6d2d4-operator-scripts\") pod \"76950441-48ad-4562-bbf6-15fd01c6d2d4\" (UID: \"76950441-48ad-4562-bbf6-15fd01c6d2d4\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.756656 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ed94a520-b8d1-4f49-97a4-12f97b6c814d-var-run\") pod \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.756681 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71e11ad7-2fb2-4059-997a-107df77d50aa-operator-scripts\") pod \"71e11ad7-2fb2-4059-997a-107df77d50aa\" (UID: \"71e11ad7-2fb2-4059-997a-107df77d50aa\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.757112 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dtq8\" (UniqueName: \"kubernetes.io/projected/71e11ad7-2fb2-4059-997a-107df77d50aa-kube-api-access-6dtq8\") pod \"71e11ad7-2fb2-4059-997a-107df77d50aa\" (UID: \"71e11ad7-2fb2-4059-997a-107df77d50aa\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.757159 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49ca39dc-75b9-48e7-a8c6-2b405d10a219-operator-scripts\") pod \"49ca39dc-75b9-48e7-a8c6-2b405d10a219\" (UID: \"49ca39dc-75b9-48e7-a8c6-2b405d10a219\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.757318 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bbc7c682-a173-43e9-bca1-404c12b4b333-operator-scripts\") pod \"bbc7c682-a173-43e9-bca1-404c12b4b333\" (UID: \"bbc7c682-a173-43e9-bca1-404c12b4b333\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.757341 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bdc7b\" (UniqueName: 
\"kubernetes.io/projected/49ca39dc-75b9-48e7-a8c6-2b405d10a219-kube-api-access-bdc7b\") pod \"49ca39dc-75b9-48e7-a8c6-2b405d10a219\" (UID: \"49ca39dc-75b9-48e7-a8c6-2b405d10a219\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.757368 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2b6v\" (UniqueName: \"kubernetes.io/projected/76950441-48ad-4562-bbf6-15fd01c6d2d4-kube-api-access-x2b6v\") pod \"76950441-48ad-4562-bbf6-15fd01c6d2d4\" (UID: \"76950441-48ad-4562-bbf6-15fd01c6d2d4\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.757397 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ed94a520-b8d1-4f49-97a4-12f97b6c814d-scripts\") pod \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.757415 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ed94a520-b8d1-4f49-97a4-12f97b6c814d-var-run-ovn\") pod \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.757456 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ed94a520-b8d1-4f49-97a4-12f97b6c814d-additional-scripts\") pod \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\" (UID: \"ed94a520-b8d1-4f49-97a4-12f97b6c814d\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.757916 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rl46f\" (UniqueName: \"kubernetes.io/projected/7e05b09d-5b2a-4b6a-8ff9-dbbe19018706-kube-api-access-rl46f\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.757927 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e05b09d-5b2a-4b6a-8ff9-dbbe19018706-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.757936 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vfz82\" (UniqueName: \"kubernetes.io/projected/0329c472-ea03-47c4-a6e7-ae1f89169e6b-kube-api-access-vfz82\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.757945 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0329c472-ea03-47c4-a6e7-ae1f89169e6b-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.758093 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71e11ad7-2fb2-4059-997a-107df77d50aa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "71e11ad7-2fb2-4059-997a-107df77d50aa" (UID: "71e11ad7-2fb2-4059-997a-107df77d50aa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.758156 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ed94a520-b8d1-4f49-97a4-12f97b6c814d-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "ed94a520-b8d1-4f49-97a4-12f97b6c814d" (UID: "ed94a520-b8d1-4f49-97a4-12f97b6c814d"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.759250 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed94a520-b8d1-4f49-97a4-12f97b6c814d-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "ed94a520-b8d1-4f49-97a4-12f97b6c814d" (UID: "ed94a520-b8d1-4f49-97a4-12f97b6c814d"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.763110 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ca39dc-75b9-48e7-a8c6-2b405d10a219-kube-api-access-bdc7b" (OuterVolumeSpecName: "kube-api-access-bdc7b") pod "49ca39dc-75b9-48e7-a8c6-2b405d10a219" (UID: "49ca39dc-75b9-48e7-a8c6-2b405d10a219"). InnerVolumeSpecName "kube-api-access-bdc7b". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.763405 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49ca39dc-75b9-48e7-a8c6-2b405d10a219-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "49ca39dc-75b9-48e7-a8c6-2b405d10a219" (UID: "49ca39dc-75b9-48e7-a8c6-2b405d10a219"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.763764 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bbc7c682-a173-43e9-bca1-404c12b4b333-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bbc7c682-a173-43e9-bca1-404c12b4b333" (UID: "bbc7c682-a173-43e9-bca1-404c12b4b333"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.765542 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76950441-48ad-4562-bbf6-15fd01c6d2d4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "76950441-48ad-4562-bbf6-15fd01c6d2d4" (UID: "76950441-48ad-4562-bbf6-15fd01c6d2d4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.765610 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ed94a520-b8d1-4f49-97a4-12f97b6c814d-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "ed94a520-b8d1-4f49-97a4-12f97b6c814d" (UID: "ed94a520-b8d1-4f49-97a4-12f97b6c814d"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.765681 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ed94a520-b8d1-4f49-97a4-12f97b6c814d-var-run" (OuterVolumeSpecName: "var-run") pod "ed94a520-b8d1-4f49-97a4-12f97b6c814d" (UID: "ed94a520-b8d1-4f49-97a4-12f97b6c814d"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.765800 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed94a520-b8d1-4f49-97a4-12f97b6c814d-scripts" (OuterVolumeSpecName: "scripts") pod "ed94a520-b8d1-4f49-97a4-12f97b6c814d" (UID: "ed94a520-b8d1-4f49-97a4-12f97b6c814d"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.768166 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed94a520-b8d1-4f49-97a4-12f97b6c814d-kube-api-access-h4vfj" (OuterVolumeSpecName: "kube-api-access-h4vfj") pod "ed94a520-b8d1-4f49-97a4-12f97b6c814d" (UID: "ed94a520-b8d1-4f49-97a4-12f97b6c814d"). InnerVolumeSpecName "kube-api-access-h4vfj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.768580 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76950441-48ad-4562-bbf6-15fd01c6d2d4-kube-api-access-x2b6v" (OuterVolumeSpecName: "kube-api-access-x2b6v") pod "76950441-48ad-4562-bbf6-15fd01c6d2d4" (UID: "76950441-48ad-4562-bbf6-15fd01c6d2d4"). InnerVolumeSpecName "kube-api-access-x2b6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.770141 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbc7c682-a173-43e9-bca1-404c12b4b333-kube-api-access-l62q6" (OuterVolumeSpecName: "kube-api-access-l62q6") pod "bbc7c682-a173-43e9-bca1-404c12b4b333" (UID: "bbc7c682-a173-43e9-bca1-404c12b4b333"). InnerVolumeSpecName "kube-api-access-l62q6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.770275 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71e11ad7-2fb2-4059-997a-107df77d50aa-kube-api-access-6dtq8" (OuterVolumeSpecName: "kube-api-access-6dtq8") pod "71e11ad7-2fb2-4059-997a-107df77d50aa" (UID: "71e11ad7-2fb2-4059-997a-107df77d50aa"). InnerVolumeSpecName "kube-api-access-6dtq8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.859615 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd-operator-scripts\") pod \"82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd\" (UID: \"82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.859895 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-clpxz\" (UniqueName: \"kubernetes.io/projected/222237ea-cd02-4919-b29c-9770862a1d51-kube-api-access-clpxz\") pod \"222237ea-cd02-4919-b29c-9770862a1d51\" (UID: \"222237ea-cd02-4919-b29c-9770862a1d51\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.860329 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0591104a-5134-4402-aa0f-4b94a79aa5df-operator-scripts\") pod \"0591104a-5134-4402-aa0f-4b94a79aa5df\" (UID: \"0591104a-5134-4402-aa0f-4b94a79aa5df\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.860434 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f744w\" (UniqueName: \"kubernetes.io/projected/82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd-kube-api-access-f744w\") pod \"82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd\" (UID: \"82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.860556 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/222237ea-cd02-4919-b29c-9770862a1d51-operator-scripts\") pod \"222237ea-cd02-4919-b29c-9770862a1d51\" (UID: \"222237ea-cd02-4919-b29c-9770862a1d51\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.860784 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0591104a-5134-4402-aa0f-4b94a79aa5df-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0591104a-5134-4402-aa0f-4b94a79aa5df" (UID: "0591104a-5134-4402-aa0f-4b94a79aa5df"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.860797 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ps2vh\" (UniqueName: \"kubernetes.io/projected/0591104a-5134-4402-aa0f-4b94a79aa5df-kube-api-access-ps2vh\") pod \"0591104a-5134-4402-aa0f-4b94a79aa5df\" (UID: \"0591104a-5134-4402-aa0f-4b94a79aa5df\") " Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.860586 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd" (UID: "82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.861038 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/222237ea-cd02-4919-b29c-9770862a1d51-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "222237ea-cd02-4919-b29c-9770862a1d51" (UID: "222237ea-cd02-4919-b29c-9770862a1d51"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.861708 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2b6v\" (UniqueName: \"kubernetes.io/projected/76950441-48ad-4562-bbf6-15fd01c6d2d4-kube-api-access-x2b6v\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.861742 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.861753 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ed94a520-b8d1-4f49-97a4-12f97b6c814d-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.861764 4755 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ed94a520-b8d1-4f49-97a4-12f97b6c814d-var-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.861774 4755 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ed94a520-b8d1-4f49-97a4-12f97b6c814d-additional-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.861782 4755 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ed94a520-b8d1-4f49-97a4-12f97b6c814d-var-log-ovn\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.861790 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0591104a-5134-4402-aa0f-4b94a79aa5df-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.861798 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/222237ea-cd02-4919-b29c-9770862a1d51-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.861806 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4vfj\" (UniqueName: \"kubernetes.io/projected/ed94a520-b8d1-4f49-97a4-12f97b6c814d-kube-api-access-h4vfj\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.861814 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l62q6\" (UniqueName: \"kubernetes.io/projected/bbc7c682-a173-43e9-bca1-404c12b4b333-kube-api-access-l62q6\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.861823 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76950441-48ad-4562-bbf6-15fd01c6d2d4-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.861832 4755 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ed94a520-b8d1-4f49-97a4-12f97b6c814d-var-run\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.861841 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71e11ad7-2fb2-4059-997a-107df77d50aa-operator-scripts\") on node \"crc\" 
DevicePath \"\"" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.861850 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dtq8\" (UniqueName: \"kubernetes.io/projected/71e11ad7-2fb2-4059-997a-107df77d50aa-kube-api-access-6dtq8\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.861859 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49ca39dc-75b9-48e7-a8c6-2b405d10a219-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.861867 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bbc7c682-a173-43e9-bca1-404c12b4b333-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.861875 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bdc7b\" (UniqueName: \"kubernetes.io/projected/49ca39dc-75b9-48e7-a8c6-2b405d10a219-kube-api-access-bdc7b\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.866222 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/222237ea-cd02-4919-b29c-9770862a1d51-kube-api-access-clpxz" (OuterVolumeSpecName: "kube-api-access-clpxz") pod "222237ea-cd02-4919-b29c-9770862a1d51" (UID: "222237ea-cd02-4919-b29c-9770862a1d51"). InnerVolumeSpecName "kube-api-access-clpxz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.866319 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd-kube-api-access-f744w" (OuterVolumeSpecName: "kube-api-access-f744w") pod "82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd" (UID: "82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd"). InnerVolumeSpecName "kube-api-access-f744w". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.866536 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0591104a-5134-4402-aa0f-4b94a79aa5df-kube-api-access-ps2vh" (OuterVolumeSpecName: "kube-api-access-ps2vh") pod "0591104a-5134-4402-aa0f-4b94a79aa5df" (UID: "0591104a-5134-4402-aa0f-4b94a79aa5df"). InnerVolumeSpecName "kube-api-access-ps2vh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.964129 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f744w\" (UniqueName: \"kubernetes.io/projected/82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd-kube-api-access-f744w\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.964188 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ps2vh\" (UniqueName: \"kubernetes.io/projected/0591104a-5134-4402-aa0f-4b94a79aa5df-kube-api-access-ps2vh\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:21 crc kubenswrapper[4755]: I0202 22:53:21.964207 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-clpxz\" (UniqueName: \"kubernetes.io/projected/222237ea-cd02-4919-b29c-9770862a1d51-kube-api-access-clpxz\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.552375 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-8j88c" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.552436 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-ed0a-account-create-update-5rdjc" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.552459 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-2fbd-account-create-update-gv4s4" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.552472 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-d4c24" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.552471 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-l5tks" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.552480 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-ftxm7" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.552512 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6a80-account-create-update-hngf6" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.552519 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-8flnx" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.552553 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jdd7m-config-8628s" Feb 02 22:53:22 crc kubenswrapper[4755]: E0202 22:53:22.679468 4755 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod82b3aa0f_e2ee_4c4c_81ac_48571c2cdbdd.slice/crio-015b428006324f9d2983dcccd703f6124bb5de9767a6faf2fa8bb49c264a4516\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded94a520_b8d1_4f49_97a4_12f97b6c814d.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76950441_48ad_4562_bbf6_15fd01c6d2d4.slice\": RecentStats: unable to find data in memory cache]" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.764412 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-jdd7m-config-8628s"] Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.780996 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-jdd7m-config-8628s"] Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.868629 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-jdd7m-config-mfxf8"] Feb 02 22:53:22 crc kubenswrapper[4755]: E0202 22:53:22.869269 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76950441-48ad-4562-bbf6-15fd01c6d2d4" containerName="mariadb-account-create-update" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.869338 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="76950441-48ad-4562-bbf6-15fd01c6d2d4" containerName="mariadb-account-create-update" Feb 02 22:53:22 crc kubenswrapper[4755]: E0202 22:53:22.869402 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbc7c682-a173-43e9-bca1-404c12b4b333" containerName="mariadb-account-create-update" Feb 02 22:53:22 crc kubenswrapper[4755]: 
I0202 22:53:22.869528 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbc7c682-a173-43e9-bca1-404c12b4b333" containerName="mariadb-account-create-update" Feb 02 22:53:22 crc kubenswrapper[4755]: E0202 22:53:22.869595 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="222237ea-cd02-4919-b29c-9770862a1d51" containerName="mariadb-database-create" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.869648 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="222237ea-cd02-4919-b29c-9770862a1d51" containerName="mariadb-database-create" Feb 02 22:53:22 crc kubenswrapper[4755]: E0202 22:53:22.869706 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0591104a-5134-4402-aa0f-4b94a79aa5df" containerName="mariadb-database-create" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.869783 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="0591104a-5134-4402-aa0f-4b94a79aa5df" containerName="mariadb-database-create" Feb 02 22:53:22 crc kubenswrapper[4755]: E0202 22:53:22.869870 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49ca39dc-75b9-48e7-a8c6-2b405d10a219" containerName="mariadb-account-create-update" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.869930 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="49ca39dc-75b9-48e7-a8c6-2b405d10a219" containerName="mariadb-account-create-update" Feb 02 22:53:22 crc kubenswrapper[4755]: E0202 22:53:22.869988 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed94a520-b8d1-4f49-97a4-12f97b6c814d" containerName="ovn-config" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.870045 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed94a520-b8d1-4f49-97a4-12f97b6c814d" containerName="ovn-config" Feb 02 22:53:22 crc kubenswrapper[4755]: E0202 22:53:22.870105 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e05b09d-5b2a-4b6a-8ff9-dbbe19018706" containerName="mariadb-account-create-update" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.870169 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e05b09d-5b2a-4b6a-8ff9-dbbe19018706" containerName="mariadb-account-create-update" Feb 02 22:53:22 crc kubenswrapper[4755]: E0202 22:53:22.870242 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd" containerName="mariadb-account-create-update" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.870295 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd" containerName="mariadb-account-create-update" Feb 02 22:53:22 crc kubenswrapper[4755]: E0202 22:53:22.870357 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71e11ad7-2fb2-4059-997a-107df77d50aa" containerName="mariadb-database-create" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.870409 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="71e11ad7-2fb2-4059-997a-107df77d50aa" containerName="mariadb-database-create" Feb 02 22:53:22 crc kubenswrapper[4755]: E0202 22:53:22.870467 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0329c472-ea03-47c4-a6e7-ae1f89169e6b" containerName="mariadb-database-create" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.870516 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="0329c472-ea03-47c4-a6e7-ae1f89169e6b" containerName="mariadb-database-create" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.870749 4755 
memory_manager.go:354] "RemoveStaleState removing state" podUID="76950441-48ad-4562-bbf6-15fd01c6d2d4" containerName="mariadb-account-create-update" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.870847 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="71e11ad7-2fb2-4059-997a-107df77d50aa" containerName="mariadb-database-create" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.871588 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="0329c472-ea03-47c4-a6e7-ae1f89169e6b" containerName="mariadb-database-create" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.871660 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbc7c682-a173-43e9-bca1-404c12b4b333" containerName="mariadb-account-create-update" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.871720 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd" containerName="mariadb-account-create-update" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.871798 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed94a520-b8d1-4f49-97a4-12f97b6c814d" containerName="ovn-config" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.871873 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="49ca39dc-75b9-48e7-a8c6-2b405d10a219" containerName="mariadb-account-create-update" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.871949 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="0591104a-5134-4402-aa0f-4b94a79aa5df" containerName="mariadb-database-create" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.872019 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e05b09d-5b2a-4b6a-8ff9-dbbe19018706" containerName="mariadb-account-create-update" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.872078 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="222237ea-cd02-4919-b29c-9770862a1d51" containerName="mariadb-database-create" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.872780 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-jdd7m-config-mfxf8" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.875912 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jdd7m-config-mfxf8"] Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.879195 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.984396 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/27de346e-c715-4829-996e-e4f1bf2b1452-var-log-ovn\") pod \"ovn-controller-jdd7m-config-mfxf8\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " pod="openstack/ovn-controller-jdd7m-config-mfxf8" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.984776 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/27de346e-c715-4829-996e-e4f1bf2b1452-var-run-ovn\") pod \"ovn-controller-jdd7m-config-mfxf8\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " pod="openstack/ovn-controller-jdd7m-config-mfxf8" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.984908 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27de346e-c715-4829-996e-e4f1bf2b1452-scripts\") pod \"ovn-controller-jdd7m-config-mfxf8\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " pod="openstack/ovn-controller-jdd7m-config-mfxf8" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.985270 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qm4td\" (UniqueName: \"kubernetes.io/projected/27de346e-c715-4829-996e-e4f1bf2b1452-kube-api-access-qm4td\") pod \"ovn-controller-jdd7m-config-mfxf8\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " pod="openstack/ovn-controller-jdd7m-config-mfxf8" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.985344 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/27de346e-c715-4829-996e-e4f1bf2b1452-var-run\") pod \"ovn-controller-jdd7m-config-mfxf8\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " pod="openstack/ovn-controller-jdd7m-config-mfxf8" Feb 02 22:53:22 crc kubenswrapper[4755]: I0202 22:53:22.985442 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/27de346e-c715-4829-996e-e4f1bf2b1452-additional-scripts\") pod \"ovn-controller-jdd7m-config-mfxf8\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " pod="openstack/ovn-controller-jdd7m-config-mfxf8" Feb 02 22:53:23 crc kubenswrapper[4755]: I0202 22:53:23.087370 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27de346e-c715-4829-996e-e4f1bf2b1452-scripts\") pod \"ovn-controller-jdd7m-config-mfxf8\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " pod="openstack/ovn-controller-jdd7m-config-mfxf8" Feb 02 22:53:23 crc kubenswrapper[4755]: I0202 22:53:23.087508 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qm4td\" (UniqueName: 
\"kubernetes.io/projected/27de346e-c715-4829-996e-e4f1bf2b1452-kube-api-access-qm4td\") pod \"ovn-controller-jdd7m-config-mfxf8\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " pod="openstack/ovn-controller-jdd7m-config-mfxf8" Feb 02 22:53:23 crc kubenswrapper[4755]: I0202 22:53:23.087557 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/27de346e-c715-4829-996e-e4f1bf2b1452-var-run\") pod \"ovn-controller-jdd7m-config-mfxf8\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " pod="openstack/ovn-controller-jdd7m-config-mfxf8" Feb 02 22:53:23 crc kubenswrapper[4755]: I0202 22:53:23.087613 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/27de346e-c715-4829-996e-e4f1bf2b1452-additional-scripts\") pod \"ovn-controller-jdd7m-config-mfxf8\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " pod="openstack/ovn-controller-jdd7m-config-mfxf8" Feb 02 22:53:23 crc kubenswrapper[4755]: I0202 22:53:23.087678 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed94a520-b8d1-4f49-97a4-12f97b6c814d" path="/var/lib/kubelet/pods/ed94a520-b8d1-4f49-97a4-12f97b6c814d/volumes" Feb 02 22:53:23 crc kubenswrapper[4755]: I0202 22:53:23.087951 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/27de346e-c715-4829-996e-e4f1bf2b1452-var-run\") pod \"ovn-controller-jdd7m-config-mfxf8\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " pod="openstack/ovn-controller-jdd7m-config-mfxf8" Feb 02 22:53:23 crc kubenswrapper[4755]: I0202 22:53:23.087995 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/27de346e-c715-4829-996e-e4f1bf2b1452-var-log-ovn\") pod \"ovn-controller-jdd7m-config-mfxf8\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " pod="openstack/ovn-controller-jdd7m-config-mfxf8" Feb 02 22:53:23 crc kubenswrapper[4755]: I0202 22:53:23.088154 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/27de346e-c715-4829-996e-e4f1bf2b1452-var-run-ovn\") pod \"ovn-controller-jdd7m-config-mfxf8\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " pod="openstack/ovn-controller-jdd7m-config-mfxf8" Feb 02 22:53:23 crc kubenswrapper[4755]: I0202 22:53:23.088317 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/27de346e-c715-4829-996e-e4f1bf2b1452-var-log-ovn\") pod \"ovn-controller-jdd7m-config-mfxf8\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " pod="openstack/ovn-controller-jdd7m-config-mfxf8" Feb 02 22:53:23 crc kubenswrapper[4755]: I0202 22:53:23.088372 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/27de346e-c715-4829-996e-e4f1bf2b1452-var-run-ovn\") pod \"ovn-controller-jdd7m-config-mfxf8\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " pod="openstack/ovn-controller-jdd7m-config-mfxf8" Feb 02 22:53:23 crc kubenswrapper[4755]: I0202 22:53:23.088696 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/27de346e-c715-4829-996e-e4f1bf2b1452-additional-scripts\") pod \"ovn-controller-jdd7m-config-mfxf8\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " 
pod="openstack/ovn-controller-jdd7m-config-mfxf8" Feb 02 22:53:23 crc kubenswrapper[4755]: I0202 22:53:23.092154 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27de346e-c715-4829-996e-e4f1bf2b1452-scripts\") pod \"ovn-controller-jdd7m-config-mfxf8\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " pod="openstack/ovn-controller-jdd7m-config-mfxf8" Feb 02 22:53:23 crc kubenswrapper[4755]: I0202 22:53:23.113415 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qm4td\" (UniqueName: \"kubernetes.io/projected/27de346e-c715-4829-996e-e4f1bf2b1452-kube-api-access-qm4td\") pod \"ovn-controller-jdd7m-config-mfxf8\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " pod="openstack/ovn-controller-jdd7m-config-mfxf8" Feb 02 22:53:23 crc kubenswrapper[4755]: I0202 22:53:23.189127 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jdd7m-config-mfxf8" Feb 02 22:53:23 crc kubenswrapper[4755]: I0202 22:53:23.373143 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-ingester-0" Feb 02 22:53:23 crc kubenswrapper[4755]: I0202 22:53:23.389416 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 22:53:23 crc kubenswrapper[4755]: I0202 22:53:23.389472 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 22:53:23 crc kubenswrapper[4755]: I0202 22:53:23.563792 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"50a165a2-aeeb-4f83-9af3-a33f76b34a39","Type":"ContainerStarted","Data":"a0b7303f0064b8b820aeadefcd233db7c3a44c21ad24b1b91d77fe8fd87ec139"} Feb 02 22:53:23 crc kubenswrapper[4755]: I0202 22:53:23.563841 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"50a165a2-aeeb-4f83-9af3-a33f76b34a39","Type":"ContainerStarted","Data":"a599d2179cc0c8b3049fa5e032792adceb1eb57786c749d5e272162ec68270ac"} Feb 02 22:53:23 crc kubenswrapper[4755]: I0202 22:53:23.564837 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-x247p" event={"ID":"47165bb1-af4a-4e73-957e-8f0845a29841","Type":"ContainerStarted","Data":"860e0b0b085a0b082e86f1391c72d6a9122cb59891614f23987abcefce7dc5e8"} Feb 02 22:53:23 crc kubenswrapper[4755]: I0202 22:53:23.583943 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-x247p" podStartSLOduration=10.683802404 podStartE2EDuration="15.583922011s" podCreationTimestamp="2026-02-02 22:53:08 +0000 UTC" firstStartedPulling="2026-02-02 22:53:16.35033141 +0000 UTC m=+1152.041551736" lastFinishedPulling="2026-02-02 22:53:21.250451017 +0000 UTC m=+1156.941671343" observedRunningTime="2026-02-02 22:53:23.578534561 +0000 UTC m=+1159.269754897" watchObservedRunningTime="2026-02-02 22:53:23.583922011 +0000 UTC m=+1159.275142337" Feb 02 22:53:23 crc kubenswrapper[4755]: I0202 22:53:23.677415 4755 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jdd7m-config-mfxf8"] Feb 02 22:53:23 crc kubenswrapper[4755]: W0202 22:53:23.702824 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod27de346e_c715_4829_996e_e4f1bf2b1452.slice/crio-c5b38f2327cde3a1c69442f2b4b7fbfce91c099b8f67c2df9474bd2af7a62007 WatchSource:0}: Error finding container c5b38f2327cde3a1c69442f2b4b7fbfce91c099b8f67c2df9474bd2af7a62007: Status 404 returned error can't find the container with id c5b38f2327cde3a1c69442f2b4b7fbfce91c099b8f67c2df9474bd2af7a62007 Feb 02 22:53:24 crc kubenswrapper[4755]: I0202 22:53:24.586431 4755 generic.go:334] "Generic (PLEG): container finished" podID="27de346e-c715-4829-996e-e4f1bf2b1452" containerID="7c20449becb37e1509e2bcbb6395799eb5bd56dab8a511364e8d95d89937ac9e" exitCode=0 Feb 02 22:53:24 crc kubenswrapper[4755]: I0202 22:53:24.586499 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jdd7m-config-mfxf8" event={"ID":"27de346e-c715-4829-996e-e4f1bf2b1452","Type":"ContainerDied","Data":"7c20449becb37e1509e2bcbb6395799eb5bd56dab8a511364e8d95d89937ac9e"} Feb 02 22:53:24 crc kubenswrapper[4755]: I0202 22:53:24.587036 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jdd7m-config-mfxf8" event={"ID":"27de346e-c715-4829-996e-e4f1bf2b1452","Type":"ContainerStarted","Data":"c5b38f2327cde3a1c69442f2b4b7fbfce91c099b8f67c2df9474bd2af7a62007"} Feb 02 22:53:25 crc kubenswrapper[4755]: I0202 22:53:25.618157 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"50a165a2-aeeb-4f83-9af3-a33f76b34a39","Type":"ContainerStarted","Data":"6fef1c3c65e0fdeceb270f0c9e3a3cef51ab0a319ae0aebc0a88ed842779c881"} Feb 02 22:53:25 crc kubenswrapper[4755]: I0202 22:53:25.618510 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"50a165a2-aeeb-4f83-9af3-a33f76b34a39","Type":"ContainerStarted","Data":"c3e386fc47380c4642349ef650720148d745fabb303fd79601f4df13346a6674"} Feb 02 22:53:25 crc kubenswrapper[4755]: I0202 22:53:25.618522 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"50a165a2-aeeb-4f83-9af3-a33f76b34a39","Type":"ContainerStarted","Data":"8511b4a58d5b9a1f247e256d3183eca0be42a93acd2be766cc0ba472eab6d03c"} Feb 02 22:53:25 crc kubenswrapper[4755]: I0202 22:53:25.939140 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-jdd7m-config-mfxf8" Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.042068 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27de346e-c715-4829-996e-e4f1bf2b1452-scripts\") pod \"27de346e-c715-4829-996e-e4f1bf2b1452\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.042167 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/27de346e-c715-4829-996e-e4f1bf2b1452-var-run\") pod \"27de346e-c715-4829-996e-e4f1bf2b1452\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.042235 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/27de346e-c715-4829-996e-e4f1bf2b1452-var-run-ovn\") pod \"27de346e-c715-4829-996e-e4f1bf2b1452\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.042336 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/27de346e-c715-4829-996e-e4f1bf2b1452-var-log-ovn\") pod \"27de346e-c715-4829-996e-e4f1bf2b1452\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.042363 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/27de346e-c715-4829-996e-e4f1bf2b1452-additional-scripts\") pod \"27de346e-c715-4829-996e-e4f1bf2b1452\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.042396 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qm4td\" (UniqueName: \"kubernetes.io/projected/27de346e-c715-4829-996e-e4f1bf2b1452-kube-api-access-qm4td\") pod \"27de346e-c715-4829-996e-e4f1bf2b1452\" (UID: \"27de346e-c715-4829-996e-e4f1bf2b1452\") " Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.042585 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/27de346e-c715-4829-996e-e4f1bf2b1452-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "27de346e-c715-4829-996e-e4f1bf2b1452" (UID: "27de346e-c715-4829-996e-e4f1bf2b1452"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.042630 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/27de346e-c715-4829-996e-e4f1bf2b1452-var-run" (OuterVolumeSpecName: "var-run") pod "27de346e-c715-4829-996e-e4f1bf2b1452" (UID: "27de346e-c715-4829-996e-e4f1bf2b1452"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.042691 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/27de346e-c715-4829-996e-e4f1bf2b1452-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "27de346e-c715-4829-996e-e4f1bf2b1452" (UID: "27de346e-c715-4829-996e-e4f1bf2b1452"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.043301 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27de346e-c715-4829-996e-e4f1bf2b1452-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "27de346e-c715-4829-996e-e4f1bf2b1452" (UID: "27de346e-c715-4829-996e-e4f1bf2b1452"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.043464 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27de346e-c715-4829-996e-e4f1bf2b1452-scripts" (OuterVolumeSpecName: "scripts") pod "27de346e-c715-4829-996e-e4f1bf2b1452" (UID: "27de346e-c715-4829-996e-e4f1bf2b1452"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.043625 4755 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/27de346e-c715-4829-996e-e4f1bf2b1452-var-run\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.043790 4755 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/27de346e-c715-4829-996e-e4f1bf2b1452-var-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.043886 4755 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/27de346e-c715-4829-996e-e4f1bf2b1452-var-log-ovn\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.053955 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27de346e-c715-4829-996e-e4f1bf2b1452-kube-api-access-qm4td" (OuterVolumeSpecName: "kube-api-access-qm4td") pod "27de346e-c715-4829-996e-e4f1bf2b1452" (UID: "27de346e-c715-4829-996e-e4f1bf2b1452"). InnerVolumeSpecName "kube-api-access-qm4td". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.145758 4755 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/27de346e-c715-4829-996e-e4f1bf2b1452-additional-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.146084 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qm4td\" (UniqueName: \"kubernetes.io/projected/27de346e-c715-4829-996e-e4f1bf2b1452-kube-api-access-qm4td\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.146099 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27de346e-c715-4829-996e-e4f1bf2b1452-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.631264 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-jdd7m-config-mfxf8" Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.631350 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jdd7m-config-mfxf8" event={"ID":"27de346e-c715-4829-996e-e4f1bf2b1452","Type":"ContainerDied","Data":"c5b38f2327cde3a1c69442f2b4b7fbfce91c099b8f67c2df9474bd2af7a62007"} Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.631385 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5b38f2327cde3a1c69442f2b4b7fbfce91c099b8f67c2df9474bd2af7a62007" Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.640554 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"50a165a2-aeeb-4f83-9af3-a33f76b34a39","Type":"ContainerStarted","Data":"9da76adf7d424d396a3fb614b2c201fae3471e2617b58ab69de1e0a934a9c274"} Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.642211 4755 generic.go:334] "Generic (PLEG): container finished" podID="02b6b0aa-4f58-4de4-83b7-c3291e005325" containerID="2e512cc897c06d8cdd94d0c3021b290bde7f9250a0d58d86a9b882dc4178d456" exitCode=0 Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.642250 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-fdxn5" event={"ID":"02b6b0aa-4f58-4de4-83b7-c3291e005325","Type":"ContainerDied","Data":"2e512cc897c06d8cdd94d0c3021b290bde7f9250a0d58d86a9b882dc4178d456"} Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.644034 4755 generic.go:334] "Generic (PLEG): container finished" podID="47165bb1-af4a-4e73-957e-8f0845a29841" containerID="860e0b0b085a0b082e86f1391c72d6a9122cb59891614f23987abcefce7dc5e8" exitCode=0 Feb 02 22:53:26 crc kubenswrapper[4755]: I0202 22:53:26.644061 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-x247p" event={"ID":"47165bb1-af4a-4e73-957e-8f0845a29841","Type":"ContainerDied","Data":"860e0b0b085a0b082e86f1391c72d6a9122cb59891614f23987abcefce7dc5e8"} Feb 02 22:53:27 crc kubenswrapper[4755]: I0202 22:53:27.015805 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-jdd7m-config-mfxf8"] Feb 02 22:53:27 crc kubenswrapper[4755]: I0202 22:53:27.023921 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-jdd7m-config-mfxf8"] Feb 02 22:53:27 crc kubenswrapper[4755]: I0202 22:53:27.081444 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27de346e-c715-4829-996e-e4f1bf2b1452" path="/var/lib/kubelet/pods/27de346e-c715-4829-996e-e4f1bf2b1452/volumes" Feb 02 22:53:27 crc kubenswrapper[4755]: I0202 22:53:27.656345 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"50a165a2-aeeb-4f83-9af3-a33f76b34a39","Type":"ContainerStarted","Data":"638954d375bca34816646edf85899325c3f4f0945133427006b07a5254482e27"} Feb 02 22:53:27 crc kubenswrapper[4755]: I0202 22:53:27.656630 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"50a165a2-aeeb-4f83-9af3-a33f76b34a39","Type":"ContainerStarted","Data":"e661435245d5a45057db4bc72570e940fd5ddcf9c1914f0f770b853a50e9c293"} Feb 02 22:53:27 crc kubenswrapper[4755]: I0202 22:53:27.656642 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"50a165a2-aeeb-4f83-9af3-a33f76b34a39","Type":"ContainerStarted","Data":"abb07ea5078ba9ba52c45bdd1f48814a296167de54890c9d951218c80d026380"} Feb 02 22:53:28 crc 
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.147954 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-fdxn5"
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.185872 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/02b6b0aa-4f58-4de4-83b7-c3291e005325-db-sync-config-data\") pod \"02b6b0aa-4f58-4de4-83b7-c3291e005325\" (UID: \"02b6b0aa-4f58-4de4-83b7-c3291e005325\") "
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.185917 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02b6b0aa-4f58-4de4-83b7-c3291e005325-config-data\") pod \"02b6b0aa-4f58-4de4-83b7-c3291e005325\" (UID: \"02b6b0aa-4f58-4de4-83b7-c3291e005325\") "
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.185962 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47165bb1-af4a-4e73-957e-8f0845a29841-combined-ca-bundle\") pod \"47165bb1-af4a-4e73-957e-8f0845a29841\" (UID: \"47165bb1-af4a-4e73-957e-8f0845a29841\") "
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.186000 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47165bb1-af4a-4e73-957e-8f0845a29841-config-data\") pod \"47165bb1-af4a-4e73-957e-8f0845a29841\" (UID: \"47165bb1-af4a-4e73-957e-8f0845a29841\") "
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.186107 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02b6b0aa-4f58-4de4-83b7-c3291e005325-combined-ca-bundle\") pod \"02b6b0aa-4f58-4de4-83b7-c3291e005325\" (UID: \"02b6b0aa-4f58-4de4-83b7-c3291e005325\") "
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.186222 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v5mtg\" (UniqueName: \"kubernetes.io/projected/02b6b0aa-4f58-4de4-83b7-c3291e005325-kube-api-access-v5mtg\") pod \"02b6b0aa-4f58-4de4-83b7-c3291e005325\" (UID: \"02b6b0aa-4f58-4de4-83b7-c3291e005325\") "
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.186260 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kkt5k\" (UniqueName: \"kubernetes.io/projected/47165bb1-af4a-4e73-957e-8f0845a29841-kube-api-access-kkt5k\") pod \"47165bb1-af4a-4e73-957e-8f0845a29841\" (UID: \"47165bb1-af4a-4e73-957e-8f0845a29841\") "
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.190268 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47165bb1-af4a-4e73-957e-8f0845a29841-kube-api-access-kkt5k" (OuterVolumeSpecName: "kube-api-access-kkt5k") pod "47165bb1-af4a-4e73-957e-8f0845a29841" (UID: "47165bb1-af4a-4e73-957e-8f0845a29841"). InnerVolumeSpecName "kube-api-access-kkt5k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.190434 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02b6b0aa-4f58-4de4-83b7-c3291e005325-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "02b6b0aa-4f58-4de4-83b7-c3291e005325" (UID: "02b6b0aa-4f58-4de4-83b7-c3291e005325"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.197172 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02b6b0aa-4f58-4de4-83b7-c3291e005325-kube-api-access-v5mtg" (OuterVolumeSpecName: "kube-api-access-v5mtg") pod "02b6b0aa-4f58-4de4-83b7-c3291e005325" (UID: "02b6b0aa-4f58-4de4-83b7-c3291e005325"). InnerVolumeSpecName "kube-api-access-v5mtg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.289440 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kkt5k\" (UniqueName: \"kubernetes.io/projected/47165bb1-af4a-4e73-957e-8f0845a29841-kube-api-access-kkt5k\") on node \"crc\" DevicePath \"\""
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.289702 4755 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/02b6b0aa-4f58-4de4-83b7-c3291e005325-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.289713 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v5mtg\" (UniqueName: \"kubernetes.io/projected/02b6b0aa-4f58-4de4-83b7-c3291e005325-kube-api-access-v5mtg\") on node \"crc\" DevicePath \"\""
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.352692 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02b6b0aa-4f58-4de4-83b7-c3291e005325-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "02b6b0aa-4f58-4de4-83b7-c3291e005325" (UID: "02b6b0aa-4f58-4de4-83b7-c3291e005325"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.352932 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47165bb1-af4a-4e73-957e-8f0845a29841-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "47165bb1-af4a-4e73-957e-8f0845a29841" (UID: "47165bb1-af4a-4e73-957e-8f0845a29841"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.376199 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02b6b0aa-4f58-4de4-83b7-c3291e005325-config-data" (OuterVolumeSpecName: "config-data") pod "02b6b0aa-4f58-4de4-83b7-c3291e005325" (UID: "02b6b0aa-4f58-4de4-83b7-c3291e005325"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.391989 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02b6b0aa-4f58-4de4-83b7-c3291e005325-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.392019 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02b6b0aa-4f58-4de4-83b7-c3291e005325-config-data\") on node \"crc\" DevicePath \"\""
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.392032 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47165bb1-af4a-4e73-957e-8f0845a29841-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.394517 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47165bb1-af4a-4e73-957e-8f0845a29841-config-data" (OuterVolumeSpecName: "config-data") pod "47165bb1-af4a-4e73-957e-8f0845a29841" (UID: "47165bb1-af4a-4e73-957e-8f0845a29841"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.494452 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47165bb1-af4a-4e73-957e-8f0845a29841-config-data\") on node \"crc\" DevicePath \"\""
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.675016 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"50a165a2-aeeb-4f83-9af3-a33f76b34a39","Type":"ContainerStarted","Data":"649d1b051757da955583ae97b01034a6746f96ec4bb0460b3129c531cfb907bb"}
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.675075 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"50a165a2-aeeb-4f83-9af3-a33f76b34a39","Type":"ContainerStarted","Data":"4db826bcae1c50847967b7bb96d503bbf38a465410a5212ccb061d327b313b06"}
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.677763 4755 generic.go:334] "Generic (PLEG): container finished" podID="56e97e05-60e4-4c71-b081-18bb5dde670b" containerID="18a426a34d3ccdc567f84e25ebfd124daab172df4d3fa4b9213c7bc5b2a4a44f" exitCode=0
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.677840 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"56e97e05-60e4-4c71-b081-18bb5dde670b","Type":"ContainerDied","Data":"18a426a34d3ccdc567f84e25ebfd124daab172df4d3fa4b9213c7bc5b2a4a44f"}
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.682566 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-fdxn5" event={"ID":"02b6b0aa-4f58-4de4-83b7-c3291e005325","Type":"ContainerDied","Data":"0928139590d028ae619b01c1d4c69ce94ae09b0314491098bde10a75e14772a7"}
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.682623 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0928139590d028ae619b01c1d4c69ce94ae09b0314491098bde10a75e14772a7"
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.684171 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-fdxn5"
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.689408 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-x247p" event={"ID":"47165bb1-af4a-4e73-957e-8f0845a29841","Type":"ContainerDied","Data":"383720aa356499797f5f6966bf3fca2f49f8124c0e31fef0241b913e2d121844"}
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.689461 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="383720aa356499797f5f6966bf3fca2f49f8124c0e31fef0241b913e2d121844"
Feb 02 22:53:28 crc kubenswrapper[4755]: I0202 22:53:28.689563 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-x247p"
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.016451 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-bjfzp"]
Feb 02 22:53:29 crc kubenswrapper[4755]: E0202 22:53:29.017152 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47165bb1-af4a-4e73-957e-8f0845a29841" containerName="keystone-db-sync"
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.017169 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="47165bb1-af4a-4e73-957e-8f0845a29841" containerName="keystone-db-sync"
Feb 02 22:53:29 crc kubenswrapper[4755]: E0202 22:53:29.017183 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27de346e-c715-4829-996e-e4f1bf2b1452" containerName="ovn-config"
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.017189 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="27de346e-c715-4829-996e-e4f1bf2b1452" containerName="ovn-config"
Feb 02 22:53:29 crc kubenswrapper[4755]: E0202 22:53:29.017220 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02b6b0aa-4f58-4de4-83b7-c3291e005325" containerName="glance-db-sync"
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.017226 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="02b6b0aa-4f58-4de4-83b7-c3291e005325" containerName="glance-db-sync"
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.017380 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="27de346e-c715-4829-996e-e4f1bf2b1452" containerName="ovn-config"
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.017396 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="47165bb1-af4a-4e73-957e-8f0845a29841" containerName="keystone-db-sync"
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.017408 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="02b6b0aa-4f58-4de4-83b7-c3291e005325" containerName="glance-db-sync"
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.018065 4755 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/keystone-bootstrap-bjfzp" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.029330 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.029379 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.029546 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.029715 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-pvh95" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.029338 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.043788 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-qztdq"] Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.045568 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9d85d47c-qztdq" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.108875 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-config\") pod \"dnsmasq-dns-5c9d85d47c-qztdq\" (UID: \"dc2ddb51-960d-4838-b97f-1e635994cb0a\") " pod="openstack/dnsmasq-dns-5c9d85d47c-qztdq" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.108931 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-fernet-keys\") pod \"keystone-bootstrap-bjfzp\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " pod="openstack/keystone-bootstrap-bjfzp" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.109036 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-credential-keys\") pod \"keystone-bootstrap-bjfzp\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " pod="openstack/keystone-bootstrap-bjfzp" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.109099 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-scripts\") pod \"keystone-bootstrap-bjfzp\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " pod="openstack/keystone-bootstrap-bjfzp" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.112369 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-qztdq"] Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.112402 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-bjfzp"] Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.114399 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-config-data\") pod \"keystone-bootstrap-bjfzp\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " pod="openstack/keystone-bootstrap-bjfzp" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 
22:53:29.119401 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8f78\" (UniqueName: \"kubernetes.io/projected/3116a356-a337-47ea-89fc-6bdf583db10f-kube-api-access-c8f78\") pod \"keystone-bootstrap-bjfzp\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " pod="openstack/keystone-bootstrap-bjfzp" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.119513 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9d85d47c-qztdq\" (UID: \"dc2ddb51-960d-4838-b97f-1e635994cb0a\") " pod="openstack/dnsmasq-dns-5c9d85d47c-qztdq" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.119603 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9d85d47c-qztdq\" (UID: \"dc2ddb51-960d-4838-b97f-1e635994cb0a\") " pod="openstack/dnsmasq-dns-5c9d85d47c-qztdq" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.119850 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-combined-ca-bundle\") pod \"keystone-bootstrap-bjfzp\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " pod="openstack/keystone-bootstrap-bjfzp" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.120115 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-dns-svc\") pod \"dnsmasq-dns-5c9d85d47c-qztdq\" (UID: \"dc2ddb51-960d-4838-b97f-1e635994cb0a\") " pod="openstack/dnsmasq-dns-5c9d85d47c-qztdq" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.120230 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgwnp\" (UniqueName: \"kubernetes.io/projected/dc2ddb51-960d-4838-b97f-1e635994cb0a-kube-api-access-dgwnp\") pod \"dnsmasq-dns-5c9d85d47c-qztdq\" (UID: \"dc2ddb51-960d-4838-b97f-1e635994cb0a\") " pod="openstack/dnsmasq-dns-5c9d85d47c-qztdq" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.222188 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-credential-keys\") pod \"keystone-bootstrap-bjfzp\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " pod="openstack/keystone-bootstrap-bjfzp" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.222247 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-scripts\") pod \"keystone-bootstrap-bjfzp\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " pod="openstack/keystone-bootstrap-bjfzp" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.222279 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-config-data\") pod \"keystone-bootstrap-bjfzp\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " pod="openstack/keystone-bootstrap-bjfzp" Feb 02 22:53:29 crc 
kubenswrapper[4755]: I0202 22:53:29.222297 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8f78\" (UniqueName: \"kubernetes.io/projected/3116a356-a337-47ea-89fc-6bdf583db10f-kube-api-access-c8f78\") pod \"keystone-bootstrap-bjfzp\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " pod="openstack/keystone-bootstrap-bjfzp" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.222316 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9d85d47c-qztdq\" (UID: \"dc2ddb51-960d-4838-b97f-1e635994cb0a\") " pod="openstack/dnsmasq-dns-5c9d85d47c-qztdq" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.222350 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9d85d47c-qztdq\" (UID: \"dc2ddb51-960d-4838-b97f-1e635994cb0a\") " pod="openstack/dnsmasq-dns-5c9d85d47c-qztdq" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.222388 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-combined-ca-bundle\") pod \"keystone-bootstrap-bjfzp\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " pod="openstack/keystone-bootstrap-bjfzp" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.222436 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-dns-svc\") pod \"dnsmasq-dns-5c9d85d47c-qztdq\" (UID: \"dc2ddb51-960d-4838-b97f-1e635994cb0a\") " pod="openstack/dnsmasq-dns-5c9d85d47c-qztdq" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.222459 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgwnp\" (UniqueName: \"kubernetes.io/projected/dc2ddb51-960d-4838-b97f-1e635994cb0a-kube-api-access-dgwnp\") pod \"dnsmasq-dns-5c9d85d47c-qztdq\" (UID: \"dc2ddb51-960d-4838-b97f-1e635994cb0a\") " pod="openstack/dnsmasq-dns-5c9d85d47c-qztdq" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.222484 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-config\") pod \"dnsmasq-dns-5c9d85d47c-qztdq\" (UID: \"dc2ddb51-960d-4838-b97f-1e635994cb0a\") " pod="openstack/dnsmasq-dns-5c9d85d47c-qztdq" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.222507 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-fernet-keys\") pod \"keystone-bootstrap-bjfzp\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " pod="openstack/keystone-bootstrap-bjfzp" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.229015 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-fernet-keys\") pod \"keystone-bootstrap-bjfzp\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " pod="openstack/keystone-bootstrap-bjfzp" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.229081 4755 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/neutron-db-sync-2vwzg"] Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.229963 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-credential-keys\") pod \"keystone-bootstrap-bjfzp\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " pod="openstack/keystone-bootstrap-bjfzp" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.230611 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9d85d47c-qztdq\" (UID: \"dc2ddb51-960d-4838-b97f-1e635994cb0a\") " pod="openstack/dnsmasq-dns-5c9d85d47c-qztdq" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.232314 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-combined-ca-bundle\") pod \"keystone-bootstrap-bjfzp\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " pod="openstack/keystone-bootstrap-bjfzp" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.232368 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-config\") pod \"dnsmasq-dns-5c9d85d47c-qztdq\" (UID: \"dc2ddb51-960d-4838-b97f-1e635994cb0a\") " pod="openstack/dnsmasq-dns-5c9d85d47c-qztdq" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.233079 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9d85d47c-qztdq\" (UID: \"dc2ddb51-960d-4838-b97f-1e635994cb0a\") " pod="openstack/dnsmasq-dns-5c9d85d47c-qztdq" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.237114 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-dns-svc\") pod \"dnsmasq-dns-5c9d85d47c-qztdq\" (UID: \"dc2ddb51-960d-4838-b97f-1e635994cb0a\") " pod="openstack/dnsmasq-dns-5c9d85d47c-qztdq" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.237757 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-2vwzg" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.249069 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-scripts\") pod \"keystone-bootstrap-bjfzp\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " pod="openstack/keystone-bootstrap-bjfzp" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.261922 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.262060 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-config-data\") pod \"keystone-bootstrap-bjfzp\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " pod="openstack/keystone-bootstrap-bjfzp" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.262157 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-5g72q" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.263101 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.269470 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgwnp\" (UniqueName: \"kubernetes.io/projected/dc2ddb51-960d-4838-b97f-1e635994cb0a-kube-api-access-dgwnp\") pod \"dnsmasq-dns-5c9d85d47c-qztdq\" (UID: \"dc2ddb51-960d-4838-b97f-1e635994cb0a\") " pod="openstack/dnsmasq-dns-5c9d85d47c-qztdq" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.281504 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8f78\" (UniqueName: \"kubernetes.io/projected/3116a356-a337-47ea-89fc-6bdf583db10f-kube-api-access-c8f78\") pod \"keystone-bootstrap-bjfzp\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " pod="openstack/keystone-bootstrap-bjfzp" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.293906 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-2vwzg"] Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.326720 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlq8k\" (UniqueName: \"kubernetes.io/projected/45b91834-5ada-4402-85c4-df681a85c076-kube-api-access-vlq8k\") pod \"neutron-db-sync-2vwzg\" (UID: \"45b91834-5ada-4402-85c4-df681a85c076\") " pod="openstack/neutron-db-sync-2vwzg" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.326806 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/45b91834-5ada-4402-85c4-df681a85c076-config\") pod \"neutron-db-sync-2vwzg\" (UID: \"45b91834-5ada-4402-85c4-df681a85c076\") " pod="openstack/neutron-db-sync-2vwzg" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.326899 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45b91834-5ada-4402-85c4-df681a85c076-combined-ca-bundle\") pod \"neutron-db-sync-2vwzg\" (UID: \"45b91834-5ada-4402-85c4-df681a85c076\") " pod="openstack/neutron-db-sync-2vwzg" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.339508 4755 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/cinder-db-sync-pwclg"] Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.340690 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-pwclg" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.350610 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-mf9xr" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.350813 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.350935 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.369795 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-pwclg"] Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.421847 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-db-sync-75zwb"] Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.423259 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-75zwb" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.430708 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/45b91834-5ada-4402-85c4-df681a85c076-config\") pod \"neutron-db-sync-2vwzg\" (UID: \"45b91834-5ada-4402-85c4-df681a85c076\") " pod="openstack/neutron-db-sync-2vwzg" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.430782 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-config-data\") pod \"cinder-db-sync-pwclg\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " pod="openstack/cinder-db-sync-pwclg" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.430867 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45b91834-5ada-4402-85c4-df681a85c076-combined-ca-bundle\") pod \"neutron-db-sync-2vwzg\" (UID: \"45b91834-5ada-4402-85c4-df681a85c076\") " pod="openstack/neutron-db-sync-2vwzg" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.430899 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-scripts\") pod \"cinder-db-sync-pwclg\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " pod="openstack/cinder-db-sync-pwclg" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.430958 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0403cc43-6199-4e95-b427-c4f268d8049a-etc-machine-id\") pod \"cinder-db-sync-pwclg\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " pod="openstack/cinder-db-sync-pwclg" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.430995 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-combined-ca-bundle\") pod \"cinder-db-sync-pwclg\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " pod="openstack/cinder-db-sync-pwclg" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.431026 
4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n67bd\" (UniqueName: \"kubernetes.io/projected/0403cc43-6199-4e95-b427-c4f268d8049a-kube-api-access-n67bd\") pod \"cinder-db-sync-pwclg\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " pod="openstack/cinder-db-sync-pwclg"
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.431072 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-db-sync-config-data\") pod \"cinder-db-sync-pwclg\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " pod="openstack/cinder-db-sync-pwclg"
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.431114 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlq8k\" (UniqueName: \"kubernetes.io/projected/45b91834-5ada-4402-85c4-df681a85c076-kube-api-access-vlq8k\") pod \"neutron-db-sync-2vwzg\" (UID: \"45b91834-5ada-4402-85c4-df681a85c076\") " pod="openstack/neutron-db-sync-2vwzg"
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.433613 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal"
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.433904 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts"
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.434039 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-6xjk4"
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.434165 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data"
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.434885 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-75zwb"]
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.435748 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-bjfzp"
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.438798 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.440982 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.444522 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.444930 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.448147 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/45b91834-5ada-4402-85c4-df681a85c076-config\") pod \"neutron-db-sync-2vwzg\" (UID: \"45b91834-5ada-4402-85c4-df681a85c076\") " pod="openstack/neutron-db-sync-2vwzg"
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.453453 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45b91834-5ada-4402-85c4-df681a85c076-combined-ca-bundle\") pod \"neutron-db-sync-2vwzg\" (UID: \"45b91834-5ada-4402-85c4-df681a85c076\") " pod="openstack/neutron-db-sync-2vwzg"
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.458613 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-xwr7s"]
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.459095 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9d85d47c-qztdq"
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.463323 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlq8k\" (UniqueName: \"kubernetes.io/projected/45b91834-5ada-4402-85c4-df681a85c076-kube-api-access-vlq8k\") pod \"neutron-db-sync-2vwzg\" (UID: \"45b91834-5ada-4402-85c4-df681a85c076\") " pod="openstack/neutron-db-sync-2vwzg"
Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.479225 4755 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/barbican-db-sync-xwr7s" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.489162 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-mkskt" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.489883 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.515803 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.519743 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-xwr7s"] Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.553611 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-scripts\") pod \"cinder-db-sync-pwclg\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " pod="openstack/cinder-db-sync-pwclg" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.553683 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0403cc43-6199-4e95-b427-c4f268d8049a-etc-machine-id\") pod \"cinder-db-sync-pwclg\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " pod="openstack/cinder-db-sync-pwclg" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.553716 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-combined-ca-bundle\") pod \"cloudkitty-db-sync-75zwb\" (UID: \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\") " pod="openstack/cloudkitty-db-sync-75zwb" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.553769 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-combined-ca-bundle\") pod \"cinder-db-sync-pwclg\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " pod="openstack/cinder-db-sync-pwclg" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.553802 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqqhl\" (UniqueName: \"kubernetes.io/projected/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-kube-api-access-tqqhl\") pod \"ceilometer-0\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") " pod="openstack/ceilometer-0" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.553826 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n67bd\" (UniqueName: \"kubernetes.io/projected/0403cc43-6199-4e95-b427-c4f268d8049a-kube-api-access-n67bd\") pod \"cinder-db-sync-pwclg\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " pod="openstack/cinder-db-sync-pwclg" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.553850 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkdd7\" (UniqueName: \"kubernetes.io/projected/e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda-kube-api-access-nkdd7\") pod \"barbican-db-sync-xwr7s\" (UID: \"e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda\") " pod="openstack/barbican-db-sync-xwr7s" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.553883 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-certs\") pod \"cloudkitty-db-sync-75zwb\" (UID: \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\") " pod="openstack/cloudkitty-db-sync-75zwb" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.553923 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda-db-sync-config-data\") pod \"barbican-db-sync-xwr7s\" (UID: \"e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda\") " pod="openstack/barbican-db-sync-xwr7s" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.553949 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") " pod="openstack/ceilometer-0" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.553971 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-db-sync-config-data\") pod \"cinder-db-sync-pwclg\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " pod="openstack/cinder-db-sync-pwclg" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.553997 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-config-data\") pod \"ceilometer-0\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") " pod="openstack/ceilometer-0" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.554043 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-log-httpd\") pod \"ceilometer-0\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") " pod="openstack/ceilometer-0" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.554070 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-scripts\") pod \"ceilometer-0\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") " pod="openstack/ceilometer-0" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.554178 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-scripts\") pod \"cloudkitty-db-sync-75zwb\" (UID: \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\") " pod="openstack/cloudkitty-db-sync-75zwb" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.554204 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-config-data\") pod \"cloudkitty-db-sync-75zwb\" (UID: \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\") " pod="openstack/cloudkitty-db-sync-75zwb" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.554242 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") " pod="openstack/ceilometer-0" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.554268 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda-combined-ca-bundle\") pod \"barbican-db-sync-xwr7s\" (UID: \"e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda\") " pod="openstack/barbican-db-sync-xwr7s" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.554291 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-run-httpd\") pod \"ceilometer-0\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") " pod="openstack/ceilometer-0" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.554313 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-config-data\") pod \"cinder-db-sync-pwclg\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " pod="openstack/cinder-db-sync-pwclg" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.554376 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htwfg\" (UniqueName: \"kubernetes.io/projected/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-kube-api-access-htwfg\") pod \"cloudkitty-db-sync-75zwb\" (UID: \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\") " pod="openstack/cloudkitty-db-sync-75zwb" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.561519 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0403cc43-6199-4e95-b427-c4f268d8049a-etc-machine-id\") pod \"cinder-db-sync-pwclg\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " pod="openstack/cinder-db-sync-pwclg" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.576667 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-qztdq"] Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.590476 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-sk4gl"] Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.590713 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-combined-ca-bundle\") pod \"cinder-db-sync-pwclg\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " pod="openstack/cinder-db-sync-pwclg" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.591082 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-scripts\") pod \"cinder-db-sync-pwclg\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " pod="openstack/cinder-db-sync-pwclg" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.596824 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-db-sync-config-data\") pod \"cinder-db-sync-pwclg\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " pod="openstack/cinder-db-sync-pwclg" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 
22:53:29.630071 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-sk4gl" Feb 02 22:53:29 crc kubenswrapper[4755]: I0202 22:53:29.630644 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-config-data\") pod \"cinder-db-sync-pwclg\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " pod="openstack/cinder-db-sync-pwclg" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.638964 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-kgxwf" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.639064 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.639246 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.652189 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-2vwzg" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.654916 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-sk4gl"] Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.663507 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-combined-ca-bundle\") pod \"cloudkitty-db-sync-75zwb\" (UID: \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\") " pod="openstack/cloudkitty-db-sync-75zwb" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.663901 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqqhl\" (UniqueName: \"kubernetes.io/projected/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-kube-api-access-tqqhl\") pod \"ceilometer-0\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") " pod="openstack/ceilometer-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.663935 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkdd7\" (UniqueName: \"kubernetes.io/projected/e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda-kube-api-access-nkdd7\") pod \"barbican-db-sync-xwr7s\" (UID: \"e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda\") " pod="openstack/barbican-db-sync-xwr7s" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.663963 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-certs\") pod \"cloudkitty-db-sync-75zwb\" (UID: \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\") " pod="openstack/cloudkitty-db-sync-75zwb" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.664007 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda-db-sync-config-data\") pod \"barbican-db-sync-xwr7s\" (UID: \"e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda\") " pod="openstack/barbican-db-sync-xwr7s" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.664027 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") " pod="openstack/ceilometer-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.664059 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-config-data\") pod \"ceilometer-0\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") " pod="openstack/ceilometer-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.664290 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-log-httpd\") pod \"ceilometer-0\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") " pod="openstack/ceilometer-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.664324 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-scripts\") pod \"ceilometer-0\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") " pod="openstack/ceilometer-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.664406 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-scripts\") pod \"cloudkitty-db-sync-75zwb\" (UID: \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\") " pod="openstack/cloudkitty-db-sync-75zwb" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.664432 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-config-data\") pod \"cloudkitty-db-sync-75zwb\" (UID: \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\") " pod="openstack/cloudkitty-db-sync-75zwb" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.664501 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") " pod="openstack/ceilometer-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.664558 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda-combined-ca-bundle\") pod \"barbican-db-sync-xwr7s\" (UID: \"e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda\") " pod="openstack/barbican-db-sync-xwr7s" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.664822 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-run-httpd\") pod \"ceilometer-0\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") " pod="openstack/ceilometer-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.664921 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htwfg\" (UniqueName: \"kubernetes.io/projected/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-kube-api-access-htwfg\") pod \"cloudkitty-db-sync-75zwb\" (UID: \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\") " pod="openstack/cloudkitty-db-sync-75zwb" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.667986 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-run-httpd\") pod \"ceilometer-0\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") " pod="openstack/ceilometer-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.668074 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-log-httpd\") pod \"ceilometer-0\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") " pod="openstack/ceilometer-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.678477 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-56798b757f-mktst"] Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.680027 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56798b757f-mktst" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.698480 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-combined-ca-bundle\") pod \"cloudkitty-db-sync-75zwb\" (UID: \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\") " pod="openstack/cloudkitty-db-sync-75zwb" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.701263 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-scripts\") pod \"cloudkitty-db-sync-75zwb\" (UID: \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\") " pod="openstack/cloudkitty-db-sync-75zwb" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.701331 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n67bd\" (UniqueName: \"kubernetes.io/projected/0403cc43-6199-4e95-b427-c4f268d8049a-kube-api-access-n67bd\") pod \"cinder-db-sync-pwclg\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " pod="openstack/cinder-db-sync-pwclg" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.701880 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") " pod="openstack/ceilometer-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.701879 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-certs\") pod \"cloudkitty-db-sync-75zwb\" (UID: \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\") " pod="openstack/cloudkitty-db-sync-75zwb" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.702483 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda-db-sync-config-data\") pod \"barbican-db-sync-xwr7s\" (UID: \"e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda\") " pod="openstack/barbican-db-sync-xwr7s" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.705685 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-config-data\") pod \"ceilometer-0\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") " pod="openstack/ceilometer-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.707485 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-scripts\") pod \"ceilometer-0\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") " pod="openstack/ceilometer-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.707578 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda-combined-ca-bundle\") pod \"barbican-db-sync-xwr7s\" (UID: \"e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda\") " pod="openstack/barbican-db-sync-xwr7s" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.715822 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-config-data\") pod \"cloudkitty-db-sync-75zwb\" (UID: \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\") " pod="openstack/cloudkitty-db-sync-75zwb" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.740347 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkdd7\" (UniqueName: \"kubernetes.io/projected/e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda-kube-api-access-nkdd7\") pod \"barbican-db-sync-xwr7s\" (UID: \"e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda\") " pod="openstack/barbican-db-sync-xwr7s" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.745752 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqqhl\" (UniqueName: \"kubernetes.io/projected/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-kube-api-access-tqqhl\") pod \"ceilometer-0\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") " pod="openstack/ceilometer-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.749342 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") " pod="openstack/ceilometer-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.750556 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htwfg\" (UniqueName: \"kubernetes.io/projected/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-kube-api-access-htwfg\") pod \"cloudkitty-db-sync-75zwb\" (UID: \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\") " pod="openstack/cloudkitty-db-sync-75zwb" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.766411 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dwr6\" (UniqueName: \"kubernetes.io/projected/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-kube-api-access-6dwr6\") pod \"dnsmasq-dns-56798b757f-mktst\" (UID: \"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\") " pod="openstack/dnsmasq-dns-56798b757f-mktst" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.766480 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-config\") pod \"dnsmasq-dns-56798b757f-mktst\" (UID: \"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\") " pod="openstack/dnsmasq-dns-56798b757f-mktst" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.766547 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-config-data\") pod \"placement-db-sync-sk4gl\" (UID: 
\"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\") " pod="openstack/placement-db-sync-sk4gl" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.766575 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9xwt\" (UniqueName: \"kubernetes.io/projected/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-kube-api-access-l9xwt\") pod \"placement-db-sync-sk4gl\" (UID: \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\") " pod="openstack/placement-db-sync-sk4gl" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.766611 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-combined-ca-bundle\") pod \"placement-db-sync-sk4gl\" (UID: \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\") " pod="openstack/placement-db-sync-sk4gl" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.766701 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-ovsdbserver-sb\") pod \"dnsmasq-dns-56798b757f-mktst\" (UID: \"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\") " pod="openstack/dnsmasq-dns-56798b757f-mktst" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.766782 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-ovsdbserver-nb\") pod \"dnsmasq-dns-56798b757f-mktst\" (UID: \"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\") " pod="openstack/dnsmasq-dns-56798b757f-mktst" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.766801 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-scripts\") pod \"placement-db-sync-sk4gl\" (UID: \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\") " pod="openstack/placement-db-sync-sk4gl" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.766838 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-logs\") pod \"placement-db-sync-sk4gl\" (UID: \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\") " pod="openstack/placement-db-sync-sk4gl" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.766859 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-dns-svc\") pod \"dnsmasq-dns-56798b757f-mktst\" (UID: \"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\") " pod="openstack/dnsmasq-dns-56798b757f-mktst" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.778643 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"50a165a2-aeeb-4f83-9af3-a33f76b34a39","Type":"ContainerStarted","Data":"2a8a2059cb329e5dc46ea7831e0cd03c18b8df9b370a3934a6461f9bbe407025"} Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.778686 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"50a165a2-aeeb-4f83-9af3-a33f76b34a39","Type":"ContainerStarted","Data":"400a4b6694536872cd94a5de88664c54fbaff150b1815aa58bf7b4e9bd5820d8"} Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.779074 4755 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56798b757f-mktst"] Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.780377 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-75zwb" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.794490 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"56e97e05-60e4-4c71-b081-18bb5dde670b","Type":"ContainerStarted","Data":"a563051417b5e49ebd9acccb7572231779f7abb7c11f73d9cf6d34477dea8a5d"} Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.806842 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.828150 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-xwr7s" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.871465 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dwr6\" (UniqueName: \"kubernetes.io/projected/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-kube-api-access-6dwr6\") pod \"dnsmasq-dns-56798b757f-mktst\" (UID: \"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\") " pod="openstack/dnsmasq-dns-56798b757f-mktst" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.871556 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-config\") pod \"dnsmasq-dns-56798b757f-mktst\" (UID: \"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\") " pod="openstack/dnsmasq-dns-56798b757f-mktst" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.871611 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-config-data\") pod \"placement-db-sync-sk4gl\" (UID: \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\") " pod="openstack/placement-db-sync-sk4gl" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.871636 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9xwt\" (UniqueName: \"kubernetes.io/projected/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-kube-api-access-l9xwt\") pod \"placement-db-sync-sk4gl\" (UID: \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\") " pod="openstack/placement-db-sync-sk4gl" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.871683 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-combined-ca-bundle\") pod \"placement-db-sync-sk4gl\" (UID: \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\") " pod="openstack/placement-db-sync-sk4gl" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.871797 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-ovsdbserver-sb\") pod \"dnsmasq-dns-56798b757f-mktst\" (UID: \"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\") " pod="openstack/dnsmasq-dns-56798b757f-mktst" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.871827 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-ovsdbserver-nb\") pod \"dnsmasq-dns-56798b757f-mktst\" (UID: 
\"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\") " pod="openstack/dnsmasq-dns-56798b757f-mktst" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.871848 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-scripts\") pod \"placement-db-sync-sk4gl\" (UID: \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\") " pod="openstack/placement-db-sync-sk4gl" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.871886 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-logs\") pod \"placement-db-sync-sk4gl\" (UID: \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\") " pod="openstack/placement-db-sync-sk4gl" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.871907 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-dns-svc\") pod \"dnsmasq-dns-56798b757f-mktst\" (UID: \"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\") " pod="openstack/dnsmasq-dns-56798b757f-mktst" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.872634 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-dns-svc\") pod \"dnsmasq-dns-56798b757f-mktst\" (UID: \"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\") " pod="openstack/dnsmasq-dns-56798b757f-mktst" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.872673 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-config\") pod \"dnsmasq-dns-56798b757f-mktst\" (UID: \"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\") " pod="openstack/dnsmasq-dns-56798b757f-mktst" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.874354 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-ovsdbserver-sb\") pod \"dnsmasq-dns-56798b757f-mktst\" (UID: \"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\") " pod="openstack/dnsmasq-dns-56798b757f-mktst" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.882283 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-ovsdbserver-nb\") pod \"dnsmasq-dns-56798b757f-mktst\" (UID: \"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\") " pod="openstack/dnsmasq-dns-56798b757f-mktst" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.882857 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=37.443994523 podStartE2EDuration="47.882845396s" podCreationTimestamp="2026-02-02 22:52:42 +0000 UTC" firstStartedPulling="2026-02-02 22:53:16.440176416 +0000 UTC m=+1152.131396742" lastFinishedPulling="2026-02-02 22:53:26.879027289 +0000 UTC m=+1162.570247615" observedRunningTime="2026-02-02 22:53:29.82756184 +0000 UTC m=+1165.518782166" watchObservedRunningTime="2026-02-02 22:53:29.882845396 +0000 UTC m=+1165.574065722" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.892515 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-scripts\") pod 
\"placement-db-sync-sk4gl\" (UID: \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\") " pod="openstack/placement-db-sync-sk4gl" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.892767 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-logs\") pod \"placement-db-sync-sk4gl\" (UID: \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\") " pod="openstack/placement-db-sync-sk4gl" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.899703 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-combined-ca-bundle\") pod \"placement-db-sync-sk4gl\" (UID: \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\") " pod="openstack/placement-db-sync-sk4gl" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.909937 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dwr6\" (UniqueName: \"kubernetes.io/projected/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-kube-api-access-6dwr6\") pod \"dnsmasq-dns-56798b757f-mktst\" (UID: \"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\") " pod="openstack/dnsmasq-dns-56798b757f-mktst" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.912171 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9xwt\" (UniqueName: \"kubernetes.io/projected/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-kube-api-access-l9xwt\") pod \"placement-db-sync-sk4gl\" (UID: \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\") " pod="openstack/placement-db-sync-sk4gl" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.912464 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-config-data\") pod \"placement-db-sync-sk4gl\" (UID: \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\") " pod="openstack/placement-db-sync-sk4gl" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:29.993928 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-pwclg" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.100196 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-sk4gl" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.114138 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56798b757f-mktst" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.204088 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56798b757f-mktst"] Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.212277 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-72bxr"] Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.218000 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.227083 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.247480 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-72bxr"] Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.320468 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.322333 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.328222 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-2l6wf" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.328414 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.333713 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.352670 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.392163 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-72bxr\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.392225 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbjcb\" (UniqueName: \"kubernetes.io/projected/48a013b4-3dbe-4944-b931-474ec989a214-kube-api-access-sbjcb\") pod \"dnsmasq-dns-56df8fb6b7-72bxr\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.392283 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-72bxr\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.392310 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-72bxr\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.392348 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-72bxr\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.392509 4755 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-config\") pod \"dnsmasq-dns-56df8fb6b7-72bxr\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.494481 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-72bxr\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.494769 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbjcb\" (UniqueName: \"kubernetes.io/projected/48a013b4-3dbe-4944-b931-474ec989a214-kube-api-access-sbjcb\") pod \"dnsmasq-dns-56df8fb6b7-72bxr\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.494821 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b82ad4da-6bed-49c5-95bf-9bcedede60ca-logs\") pod \"glance-default-external-api-0\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.494855 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-72bxr\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.494885 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-72bxr\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.494923 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b82ad4da-6bed-49c5-95bf-9bcedede60ca-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.494946 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7x25\" (UniqueName: \"kubernetes.io/projected/b82ad4da-6bed-49c5-95bf-9bcedede60ca-kube-api-access-l7x25\") pod \"glance-default-external-api-0\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.494987 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") pod \"glance-default-external-api-0\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " 
pod="openstack/glance-default-external-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.495010 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-72bxr\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.495163 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b82ad4da-6bed-49c5-95bf-9bcedede60ca-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.495242 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-72bxr\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.495260 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b82ad4da-6bed-49c5-95bf-9bcedede60ca-scripts\") pod \"glance-default-external-api-0\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.495362 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b82ad4da-6bed-49c5-95bf-9bcedede60ca-config-data\") pod \"glance-default-external-api-0\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.495433 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-config\") pod \"dnsmasq-dns-56df8fb6b7-72bxr\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.496147 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-72bxr\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.496344 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-72bxr\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.501806 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-config\") pod \"dnsmasq-dns-56df8fb6b7-72bxr\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:53:30 crc 
kubenswrapper[4755]: I0202 22:53:30.501888 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-72bxr\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.572643 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbjcb\" (UniqueName: \"kubernetes.io/projected/48a013b4-3dbe-4944-b931-474ec989a214-kube-api-access-sbjcb\") pod \"dnsmasq-dns-56df8fb6b7-72bxr\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.597205 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b82ad4da-6bed-49c5-95bf-9bcedede60ca-logs\") pod \"glance-default-external-api-0\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.597293 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b82ad4da-6bed-49c5-95bf-9bcedede60ca-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.597328 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7x25\" (UniqueName: \"kubernetes.io/projected/b82ad4da-6bed-49c5-95bf-9bcedede60ca-kube-api-access-l7x25\") pod \"glance-default-external-api-0\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.597370 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") pod \"glance-default-external-api-0\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.597414 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b82ad4da-6bed-49c5-95bf-9bcedede60ca-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.597441 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b82ad4da-6bed-49c5-95bf-9bcedede60ca-scripts\") pod \"glance-default-external-api-0\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.597473 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b82ad4da-6bed-49c5-95bf-9bcedede60ca-config-data\") pod \"glance-default-external-api-0\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.597659 4755 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b82ad4da-6bed-49c5-95bf-9bcedede60ca-logs\") pod \"glance-default-external-api-0\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.597953 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b82ad4da-6bed-49c5-95bf-9bcedede60ca-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.603573 4755 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.603610 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") pod \"glance-default-external-api-0\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/36a0f8b995f8d37a8776a91ffef55e84b7ae73b259c9d13bbc3129ab0c1d828a/globalmount\"" pod="openstack/glance-default-external-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.669749 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.671405 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.674962 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b82ad4da-6bed-49c5-95bf-9bcedede60ca-scripts\") pod \"glance-default-external-api-0\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.675942 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b82ad4da-6bed-49c5-95bf-9bcedede60ca-config-data\") pod \"glance-default-external-api-0\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.677353 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7x25\" (UniqueName: \"kubernetes.io/projected/b82ad4da-6bed-49c5-95bf-9bcedede60ca-kube-api-access-l7x25\") pod \"glance-default-external-api-0\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.681479 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b82ad4da-6bed-49c5-95bf-9bcedede60ca-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.691924 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 02 22:53:30 crc 
kubenswrapper[4755]: I0202 22:53:30.698427 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.807038 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85fa16be-f206-433c-b1b0-2e083a4ad58b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.807096 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85fa16be-f206-433c-b1b0-2e083a4ad58b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.807126 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-km8bj\" (UniqueName: \"kubernetes.io/projected/85fa16be-f206-433c-b1b0-2e083a4ad58b-kube-api-access-km8bj\") pod \"glance-default-internal-api-0\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.807157 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85fa16be-f206-433c-b1b0-2e083a4ad58b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.807180 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/85fa16be-f206-433c-b1b0-2e083a4ad58b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.807233 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85fa16be-f206-433c-b1b0-2e083a4ad58b-logs\") pod \"glance-default-internal-api-0\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.807250 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") pod \"glance-default-internal-api-0\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.850258 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-bjfzp"] Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.858149 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.910138 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85fa16be-f206-433c-b1b0-2e083a4ad58b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.910223 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85fa16be-f206-433c-b1b0-2e083a4ad58b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.911008 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-km8bj\" (UniqueName: \"kubernetes.io/projected/85fa16be-f206-433c-b1b0-2e083a4ad58b-kube-api-access-km8bj\") pod \"glance-default-internal-api-0\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.911066 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85fa16be-f206-433c-b1b0-2e083a4ad58b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.911093 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/85fa16be-f206-433c-b1b0-2e083a4ad58b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.911176 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85fa16be-f206-433c-b1b0-2e083a4ad58b-logs\") pod \"glance-default-internal-api-0\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.911196 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") pod \"glance-default-internal-api-0\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.911868 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85fa16be-f206-433c-b1b0-2e083a4ad58b-logs\") pod \"glance-default-internal-api-0\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.912832 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/85fa16be-f206-433c-b1b0-2e083a4ad58b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " pod="openstack/glance-default-internal-api-0" Feb 02 
22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.913280 4755 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.913306 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") pod \"glance-default-internal-api-0\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/12ae2e05c1109de2cfa2799707fdeed95eed1c35304f00e5efa94d1e550db555/globalmount\"" pod="openstack/glance-default-internal-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.914759 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85fa16be-f206-433c-b1b0-2e083a4ad58b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.916947 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85fa16be-f206-433c-b1b0-2e083a4ad58b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.917457 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85fa16be-f206-433c-b1b0-2e083a4ad58b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.930936 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-km8bj\" (UniqueName: \"kubernetes.io/projected/85fa16be-f206-433c-b1b0-2e083a4ad58b-kube-api-access-km8bj\") pod \"glance-default-internal-api-0\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:30 crc kubenswrapper[4755]: I0202 22:53:30.972624 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") pod \"glance-default-internal-api-0\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.038908 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.099930 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") pod \"glance-default-external-api-0\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.183046 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-xwr7s"] Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.264621 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.282867 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-2vwzg"] Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.308130 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-75zwb"] Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.319442 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.328435 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56798b757f-mktst"] Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.344654 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-qztdq"] Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.355403 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-sk4gl"] Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.372540 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-pwclg"] Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.386907 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.412672 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-72bxr"] Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.453230 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 22:53:31 crc kubenswrapper[4755]: W0202 22:53:31.477141 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod45b91834_5ada_4402_85c4_df681a85c076.slice/crio-2b3394469bac09fd3c6d22a61dc42ee2b532d97569cd3dbb925365da74ec9a5b WatchSource:0}: Error finding container 2b3394469bac09fd3c6d22a61dc42ee2b532d97569cd3dbb925365da74ec9a5b: Status 404 returned error can't find the container with id 2b3394469bac09fd3c6d22a61dc42ee2b532d97569cd3dbb925365da74ec9a5b Feb 02 22:53:31 crc kubenswrapper[4755]: W0202 22:53:31.481390 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbb79e2ee_e3af_4a8a_bb3e_a8f0deac67fe.slice/crio-300e25a36ed4b8ca65061808df7cb3dc29fa8205c246de2a2427b5910a887d98 WatchSource:0}: Error finding container 300e25a36ed4b8ca65061808df7cb3dc29fa8205c246de2a2427b5910a887d98: Status 404 returned error can't find the container with id 300e25a36ed4b8ca65061808df7cb3dc29fa8205c246de2a2427b5910a887d98 Feb 02 22:53:31 crc 
kubenswrapper[4755]: I0202 22:53:31.629193 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.827141 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.867374 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-pwclg" event={"ID":"0403cc43-6199-4e95-b427-c4f268d8049a","Type":"ContainerStarted","Data":"d36739d4bc15b804d079494ec6c033766dcd0b9408e2623c44c417fc982ec2d8"} Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.870398 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-2vwzg" event={"ID":"45b91834-5ada-4402-85c4-df681a85c076","Type":"ContainerStarted","Data":"2b3394469bac09fd3c6d22a61dc42ee2b532d97569cd3dbb925365da74ec9a5b"} Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.871336 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9d85d47c-qztdq" event={"ID":"dc2ddb51-960d-4838-b97f-1e635994cb0a","Type":"ContainerStarted","Data":"b64b9f51e15514d455e8364d23193d80fdae091a5d9d17af19410dbdf098dd8f"} Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.872138 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" event={"ID":"48a013b4-3dbe-4944-b931-474ec989a214","Type":"ContainerStarted","Data":"17367255b0464d66a939bf540ea1b0c83e8f701e6ba3a276bbcc53a3bf22d495"} Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.873142 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sk4gl" event={"ID":"27d7e0ea-4abd-4afe-be9b-460fbfea81c7","Type":"ContainerStarted","Data":"90f2f25850750a63e6507fca629ba805b7598a6320d7b425e7aba84610bd06f2"} Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.874862 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8","Type":"ContainerStarted","Data":"6ccfc9d528e12911b342e8cea0860a5123db5d641773654b47bdfb7ada2cb939"} Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.876275 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-75zwb" event={"ID":"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe","Type":"ContainerStarted","Data":"300e25a36ed4b8ca65061808df7cb3dc29fa8205c246de2a2427b5910a887d98"} Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.887005 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"56e97e05-60e4-4c71-b081-18bb5dde670b","Type":"ContainerStarted","Data":"70904b578d7354c545a98ac286d8272febfa63a3d4885e52d1ab3861bfbdab4c"} Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.894374 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-xwr7s" event={"ID":"e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda","Type":"ContainerStarted","Data":"0e47d6d930521f79046aa20a76502453ab0519e7a52d58b894d7b1c59c8d531c"} Feb 02 22:53:31 crc kubenswrapper[4755]: W0202 22:53:31.894577 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85fa16be_f206_433c_b1b0_2e083a4ad58b.slice/crio-aa20cc357fbd6eed8ba84e6a5a632f34ce2e89672a9c45e7b7df0d13119b7f96 WatchSource:0}: Error finding container aa20cc357fbd6eed8ba84e6a5a632f34ce2e89672a9c45e7b7df0d13119b7f96: Status 404 returned error can't find the container with 
id aa20cc357fbd6eed8ba84e6a5a632f34ce2e89672a9c45e7b7df0d13119b7f96 Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.904934 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bjfzp" event={"ID":"3116a356-a337-47ea-89fc-6bdf583db10f","Type":"ContainerStarted","Data":"e78b06eeab6351b5d2caf23f31e5e563938e396250852cd430074e5f3922fac1"} Feb 02 22:53:31 crc kubenswrapper[4755]: I0202 22:53:31.914630 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56798b757f-mktst" event={"ID":"b3ecedb0-b612-46f5-9de6-d17ac9d404f3","Type":"ContainerStarted","Data":"b7c1fc447f0db55a37f2ffcc297b016d27c791690ae465a740a4eb99b1c288f3"} Feb 02 22:53:32 crc kubenswrapper[4755]: I0202 22:53:32.247375 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 22:53:32 crc kubenswrapper[4755]: I0202 22:53:32.933455 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"85fa16be-f206-433c-b1b0-2e083a4ad58b","Type":"ContainerStarted","Data":"aa20cc357fbd6eed8ba84e6a5a632f34ce2e89672a9c45e7b7df0d13119b7f96"} Feb 02 22:53:32 crc kubenswrapper[4755]: I0202 22:53:32.947762 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-2vwzg" event={"ID":"45b91834-5ada-4402-85c4-df681a85c076","Type":"ContainerStarted","Data":"1720f86c20eca7c8b729f20ca7a07c4f32522ff9478534f117fa8228cc7d58a1"} Feb 02 22:53:32 crc kubenswrapper[4755]: I0202 22:53:32.985147 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-2vwzg" podStartSLOduration=3.985114457 podStartE2EDuration="3.985114457s" podCreationTimestamp="2026-02-02 22:53:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:53:32.974840072 +0000 UTC m=+1168.666060398" watchObservedRunningTime="2026-02-02 22:53:32.985114457 +0000 UTC m=+1168.676334783" Feb 02 22:53:32 crc kubenswrapper[4755]: I0202 22:53:32.986766 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"56e97e05-60e4-4c71-b081-18bb5dde670b","Type":"ContainerStarted","Data":"184cce367a5a48958aff90c30e52253e481028be9d4e42af58e644ec0b06a305"} Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.027613 4755 generic.go:334] "Generic (PLEG): container finished" podID="dc2ddb51-960d-4838-b97f-1e635994cb0a" containerID="13c57a197a1832331a116b8799e076f80445ce14757c069e737f4cdcaa570076" exitCode=0 Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.027855 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9d85d47c-qztdq" event={"ID":"dc2ddb51-960d-4838-b97f-1e635994cb0a","Type":"ContainerDied","Data":"13c57a197a1832331a116b8799e076f80445ce14757c069e737f4cdcaa570076"} Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.031078 4755 generic.go:334] "Generic (PLEG): container finished" podID="48a013b4-3dbe-4944-b931-474ec989a214" containerID="645ed23646e73aeaae2545c06f3362e0c789f0688b3648fd5c2ab4c1fb64862d" exitCode=0 Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.034363 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" event={"ID":"48a013b4-3dbe-4944-b931-474ec989a214","Type":"ContainerDied","Data":"645ed23646e73aeaae2545c06f3362e0c789f0688b3648fd5c2ab4c1fb64862d"} Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 
22:53:33.034402 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bjfzp" event={"ID":"3116a356-a337-47ea-89fc-6bdf583db10f","Type":"ContainerStarted","Data":"ad7b31f28ff90ca5b9e23cc67681ed4153a56ca002a9ead25fc3c089b7ff456f"} Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.052870 4755 generic.go:334] "Generic (PLEG): container finished" podID="b3ecedb0-b612-46f5-9de6-d17ac9d404f3" containerID="ded49aa1075d4c3dcd65d6ffd5ab879869cba65ee0b263b63980b9b7aede4855" exitCode=0 Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.053025 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56798b757f-mktst" event={"ID":"b3ecedb0-b612-46f5-9de6-d17ac9d404f3","Type":"ContainerDied","Data":"ded49aa1075d4c3dcd65d6ffd5ab879869cba65ee0b263b63980b9b7aede4855"} Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.087928 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=17.087907863 podStartE2EDuration="17.087907863s" podCreationTimestamp="2026-02-02 22:53:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:53:33.056333656 +0000 UTC m=+1168.747553992" watchObservedRunningTime="2026-02-02 22:53:33.087907863 +0000 UTC m=+1168.779128189" Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.113198 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b82ad4da-6bed-49c5-95bf-9bcedede60ca","Type":"ContainerStarted","Data":"eb15b637cc2a4b19b3e9f6b410bff9c95d4ac5791a594546e529384d6677364f"} Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.127166 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-bjfzp" podStartSLOduration=5.127144393 podStartE2EDuration="5.127144393s" podCreationTimestamp="2026-02-02 22:53:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:53:33.08602111 +0000 UTC m=+1168.777241456" watchObservedRunningTime="2026-02-02 22:53:33.127144393 +0000 UTC m=+1168.818364719" Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.619172 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9d85d47c-qztdq" Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.659493 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56798b757f-mktst" Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.812462 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-ovsdbserver-nb\") pod \"dc2ddb51-960d-4838-b97f-1e635994cb0a\" (UID: \"dc2ddb51-960d-4838-b97f-1e635994cb0a\") " Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.812787 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-ovsdbserver-sb\") pod \"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\" (UID: \"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\") " Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.812843 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-config\") pod \"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\" (UID: \"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\") " Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.812911 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgwnp\" (UniqueName: \"kubernetes.io/projected/dc2ddb51-960d-4838-b97f-1e635994cb0a-kube-api-access-dgwnp\") pod \"dc2ddb51-960d-4838-b97f-1e635994cb0a\" (UID: \"dc2ddb51-960d-4838-b97f-1e635994cb0a\") " Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.812934 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-config\") pod \"dc2ddb51-960d-4838-b97f-1e635994cb0a\" (UID: \"dc2ddb51-960d-4838-b97f-1e635994cb0a\") " Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.812957 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-dns-svc\") pod \"dc2ddb51-960d-4838-b97f-1e635994cb0a\" (UID: \"dc2ddb51-960d-4838-b97f-1e635994cb0a\") " Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.812984 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dwr6\" (UniqueName: \"kubernetes.io/projected/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-kube-api-access-6dwr6\") pod \"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\" (UID: \"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\") " Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.813101 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-ovsdbserver-sb\") pod \"dc2ddb51-960d-4838-b97f-1e635994cb0a\" (UID: \"dc2ddb51-960d-4838-b97f-1e635994cb0a\") " Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.813161 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-dns-svc\") pod \"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\" (UID: \"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\") " Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.813178 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-ovsdbserver-nb\") pod \"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\" (UID: 
\"b3ecedb0-b612-46f5-9de6-d17ac9d404f3\") " Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.827890 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-kube-api-access-6dwr6" (OuterVolumeSpecName: "kube-api-access-6dwr6") pod "b3ecedb0-b612-46f5-9de6-d17ac9d404f3" (UID: "b3ecedb0-b612-46f5-9de6-d17ac9d404f3"). InnerVolumeSpecName "kube-api-access-6dwr6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.832713 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc2ddb51-960d-4838-b97f-1e635994cb0a-kube-api-access-dgwnp" (OuterVolumeSpecName: "kube-api-access-dgwnp") pod "dc2ddb51-960d-4838-b97f-1e635994cb0a" (UID: "dc2ddb51-960d-4838-b97f-1e635994cb0a"). InnerVolumeSpecName "kube-api-access-dgwnp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.845781 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-config" (OuterVolumeSpecName: "config") pod "dc2ddb51-960d-4838-b97f-1e635994cb0a" (UID: "dc2ddb51-960d-4838-b97f-1e635994cb0a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.845983 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b3ecedb0-b612-46f5-9de6-d17ac9d404f3" (UID: "b3ecedb0-b612-46f5-9de6-d17ac9d404f3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.860473 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b3ecedb0-b612-46f5-9de6-d17ac9d404f3" (UID: "b3ecedb0-b612-46f5-9de6-d17ac9d404f3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.860976 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "dc2ddb51-960d-4838-b97f-1e635994cb0a" (UID: "dc2ddb51-960d-4838-b97f-1e635994cb0a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.886636 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b3ecedb0-b612-46f5-9de6-d17ac9d404f3" (UID: "b3ecedb0-b612-46f5-9de6-d17ac9d404f3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.889506 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "dc2ddb51-960d-4838-b97f-1e635994cb0a" (UID: "dc2ddb51-960d-4838-b97f-1e635994cb0a"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.892154 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-config" (OuterVolumeSpecName: "config") pod "b3ecedb0-b612-46f5-9de6-d17ac9d404f3" (UID: "b3ecedb0-b612-46f5-9de6-d17ac9d404f3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.907901 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "dc2ddb51-960d-4838-b97f-1e635994cb0a" (UID: "dc2ddb51-960d-4838-b97f-1e635994cb0a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.917854 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.917885 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.917895 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.917903 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.917912 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.917920 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.917928 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgwnp\" (UniqueName: \"kubernetes.io/projected/dc2ddb51-960d-4838-b97f-1e635994cb0a-kube-api-access-dgwnp\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.917940 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.917948 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc2ddb51-960d-4838-b97f-1e635994cb0a-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:33 crc kubenswrapper[4755]: I0202 22:53:33.917956 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dwr6\" (UniqueName: \"kubernetes.io/projected/b3ecedb0-b612-46f5-9de6-d17ac9d404f3-kube-api-access-6dwr6\") on node \"crc\" 
DevicePath \"\"" Feb 02 22:53:34 crc kubenswrapper[4755]: I0202 22:53:34.126646 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" event={"ID":"48a013b4-3dbe-4944-b931-474ec989a214","Type":"ContainerStarted","Data":"653749eac1c3ef1b80609303926d7bfeb7f12bbb92c79f10be55d5a827c578f2"} Feb 02 22:53:34 crc kubenswrapper[4755]: I0202 22:53:34.126717 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:53:34 crc kubenswrapper[4755]: I0202 22:53:34.134634 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56798b757f-mktst" event={"ID":"b3ecedb0-b612-46f5-9de6-d17ac9d404f3","Type":"ContainerDied","Data":"b7c1fc447f0db55a37f2ffcc297b016d27c791690ae465a740a4eb99b1c288f3"} Feb 02 22:53:34 crc kubenswrapper[4755]: I0202 22:53:34.134678 4755 scope.go:117] "RemoveContainer" containerID="ded49aa1075d4c3dcd65d6ffd5ab879869cba65ee0b263b63980b9b7aede4855" Feb 02 22:53:34 crc kubenswrapper[4755]: I0202 22:53:34.134791 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56798b757f-mktst" Feb 02 22:53:34 crc kubenswrapper[4755]: I0202 22:53:34.150664 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" podStartSLOduration=4.150645566 podStartE2EDuration="4.150645566s" podCreationTimestamp="2026-02-02 22:53:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:53:34.147930461 +0000 UTC m=+1169.839150777" watchObservedRunningTime="2026-02-02 22:53:34.150645566 +0000 UTC m=+1169.841865892" Feb 02 22:53:34 crc kubenswrapper[4755]: I0202 22:53:34.166112 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b82ad4da-6bed-49c5-95bf-9bcedede60ca","Type":"ContainerStarted","Data":"a276e04390c32fb71f5aa998fee416f5a9a2ac986495d5d82303f7b4716077e6"} Feb 02 22:53:34 crc kubenswrapper[4755]: I0202 22:53:34.169455 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"85fa16be-f206-433c-b1b0-2e083a4ad58b","Type":"ContainerStarted","Data":"65f27d997f8fe94cbf964bf4ae505777c055892b1517769229a01fca4c42d098"} Feb 02 22:53:34 crc kubenswrapper[4755]: I0202 22:53:34.234492 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56798b757f-mktst"] Feb 02 22:53:34 crc kubenswrapper[4755]: I0202 22:53:34.244143 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-56798b757f-mktst"] Feb 02 22:53:34 crc kubenswrapper[4755]: I0202 22:53:34.249313 4755 util.go:48] "No ready sandbox for pod can be found. 
Feb 02 22:53:34 crc kubenswrapper[4755]: I0202 22:53:34.249313 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9d85d47c-qztdq"
Feb 02 22:53:34 crc kubenswrapper[4755]: I0202 22:53:34.250518 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9d85d47c-qztdq" event={"ID":"dc2ddb51-960d-4838-b97f-1e635994cb0a","Type":"ContainerDied","Data":"b64b9f51e15514d455e8364d23193d80fdae091a5d9d17af19410dbdf098dd8f"}
Feb 02 22:53:34 crc kubenswrapper[4755]: I0202 22:53:34.250594 4755 scope.go:117] "RemoveContainer" containerID="13c57a197a1832331a116b8799e076f80445ce14757c069e737f4cdcaa570076"
Feb 02 22:53:34 crc kubenswrapper[4755]: I0202 22:53:34.387079 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-qztdq"]
Feb 02 22:53:34 crc kubenswrapper[4755]: I0202 22:53:34.403044 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-qztdq"]
Feb 02 22:53:35 crc kubenswrapper[4755]: I0202 22:53:35.084276 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3ecedb0-b612-46f5-9de6-d17ac9d404f3" path="/var/lib/kubelet/pods/b3ecedb0-b612-46f5-9de6-d17ac9d404f3/volumes"
Feb 02 22:53:35 crc kubenswrapper[4755]: I0202 22:53:35.085005 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc2ddb51-960d-4838-b97f-1e635994cb0a" path="/var/lib/kubelet/pods/dc2ddb51-960d-4838-b97f-1e635994cb0a/volumes"
Feb 02 22:53:35 crc kubenswrapper[4755]: I0202 22:53:35.267453 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b82ad4da-6bed-49c5-95bf-9bcedede60ca","Type":"ContainerStarted","Data":"7788867d78cd92f1a1f50b86f7937c250c00a04b562ac335d0b86c268bf1dded"}
Feb 02 22:53:35 crc kubenswrapper[4755]: I0202 22:53:35.267643 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b82ad4da-6bed-49c5-95bf-9bcedede60ca" containerName="glance-log" containerID="cri-o://a276e04390c32fb71f5aa998fee416f5a9a2ac986495d5d82303f7b4716077e6" gracePeriod=30
Feb 02 22:53:35 crc kubenswrapper[4755]: I0202 22:53:35.267863 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b82ad4da-6bed-49c5-95bf-9bcedede60ca" containerName="glance-httpd" containerID="cri-o://7788867d78cd92f1a1f50b86f7937c250c00a04b562ac335d0b86c268bf1dded" gracePeriod=30
Feb 02 22:53:35 crc kubenswrapper[4755]: I0202 22:53:35.278997 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"85fa16be-f206-433c-b1b0-2e083a4ad58b","Type":"ContainerStarted","Data":"1014b9202d0708c8335e738ccd3097bb233ccab867b65bda13bea10261a39908"}
Feb 02 22:53:35 crc kubenswrapper[4755]: I0202 22:53:35.279181 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="85fa16be-f206-433c-b1b0-2e083a4ad58b" containerName="glance-log" containerID="cri-o://65f27d997f8fe94cbf964bf4ae505777c055892b1517769229a01fca4c42d098" gracePeriod=30
Feb 02 22:53:35 crc kubenswrapper[4755]: I0202 22:53:35.279296 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="85fa16be-f206-433c-b1b0-2e083a4ad58b" containerName="glance-httpd" containerID="cri-o://1014b9202d0708c8335e738ccd3097bb233ccab867b65bda13bea10261a39908" gracePeriod=30
Feb 02 22:53:35 crc kubenswrapper[4755]: I0202 22:53:35.308207 4755
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=6.308185703 podStartE2EDuration="6.308185703s" podCreationTimestamp="2026-02-02 22:53:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:53:35.299372548 +0000 UTC m=+1170.990592864" watchObservedRunningTime="2026-02-02 22:53:35.308185703 +0000 UTC m=+1170.999406019" Feb 02 22:53:35 crc kubenswrapper[4755]: I0202 22:53:35.328168 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.328151728 podStartE2EDuration="6.328151728s" podCreationTimestamp="2026-02-02 22:53:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:53:35.323058156 +0000 UTC m=+1171.014278482" watchObservedRunningTime="2026-02-02 22:53:35.328151728 +0000 UTC m=+1171.019372054" Feb 02 22:53:35 crc kubenswrapper[4755]: I0202 22:53:35.971419 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.077461 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.086695 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b82ad4da-6bed-49c5-95bf-9bcedede60ca-config-data\") pod \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.086789 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b82ad4da-6bed-49c5-95bf-9bcedede60ca-logs\") pod \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.086833 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b82ad4da-6bed-49c5-95bf-9bcedede60ca-scripts\") pod \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.086856 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b82ad4da-6bed-49c5-95bf-9bcedede60ca-combined-ca-bundle\") pod \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.086888 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7x25\" (UniqueName: \"kubernetes.io/projected/b82ad4da-6bed-49c5-95bf-9bcedede60ca-kube-api-access-l7x25\") pod \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.087155 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") pod \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\" (UID: 
\"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.087229 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b82ad4da-6bed-49c5-95bf-9bcedede60ca-httpd-run\") pod \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\" (UID: \"b82ad4da-6bed-49c5-95bf-9bcedede60ca\") " Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.088043 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b82ad4da-6bed-49c5-95bf-9bcedede60ca-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b82ad4da-6bed-49c5-95bf-9bcedede60ca" (UID: "b82ad4da-6bed-49c5-95bf-9bcedede60ca"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.089798 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b82ad4da-6bed-49c5-95bf-9bcedede60ca-logs" (OuterVolumeSpecName: "logs") pod "b82ad4da-6bed-49c5-95bf-9bcedede60ca" (UID: "b82ad4da-6bed-49c5-95bf-9bcedede60ca"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.093453 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b82ad4da-6bed-49c5-95bf-9bcedede60ca-kube-api-access-l7x25" (OuterVolumeSpecName: "kube-api-access-l7x25") pod "b82ad4da-6bed-49c5-95bf-9bcedede60ca" (UID: "b82ad4da-6bed-49c5-95bf-9bcedede60ca"). InnerVolumeSpecName "kube-api-access-l7x25". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.095966 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b82ad4da-6bed-49c5-95bf-9bcedede60ca-scripts" (OuterVolumeSpecName: "scripts") pod "b82ad4da-6bed-49c5-95bf-9bcedede60ca" (UID: "b82ad4da-6bed-49c5-95bf-9bcedede60ca"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.115337 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484" (OuterVolumeSpecName: "glance") pod "b82ad4da-6bed-49c5-95bf-9bcedede60ca" (UID: "b82ad4da-6bed-49c5-95bf-9bcedede60ca"). InnerVolumeSpecName "pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484". PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.124171 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b82ad4da-6bed-49c5-95bf-9bcedede60ca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b82ad4da-6bed-49c5-95bf-9bcedede60ca" (UID: "b82ad4da-6bed-49c5-95bf-9bcedede60ca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.148487 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b82ad4da-6bed-49c5-95bf-9bcedede60ca-config-data" (OuterVolumeSpecName: "config-data") pod "b82ad4da-6bed-49c5-95bf-9bcedede60ca" (UID: "b82ad4da-6bed-49c5-95bf-9bcedede60ca"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.188627 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/85fa16be-f206-433c-b1b0-2e083a4ad58b-httpd-run\") pod \"85fa16be-f206-433c-b1b0-2e083a4ad58b\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.188719 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85fa16be-f206-433c-b1b0-2e083a4ad58b-logs\") pod \"85fa16be-f206-433c-b1b0-2e083a4ad58b\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.189486 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85fa16be-f206-433c-b1b0-2e083a4ad58b-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "85fa16be-f206-433c-b1b0-2e083a4ad58b" (UID: "85fa16be-f206-433c-b1b0-2e083a4ad58b"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.189487 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85fa16be-f206-433c-b1b0-2e083a4ad58b-logs" (OuterVolumeSpecName: "logs") pod "85fa16be-f206-433c-b1b0-2e083a4ad58b" (UID: "85fa16be-f206-433c-b1b0-2e083a4ad58b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.189565 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85fa16be-f206-433c-b1b0-2e083a4ad58b-combined-ca-bundle\") pod \"85fa16be-f206-433c-b1b0-2e083a4ad58b\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.189756 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-km8bj\" (UniqueName: \"kubernetes.io/projected/85fa16be-f206-433c-b1b0-2e083a4ad58b-kube-api-access-km8bj\") pod \"85fa16be-f206-433c-b1b0-2e083a4ad58b\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.189796 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85fa16be-f206-433c-b1b0-2e083a4ad58b-config-data\") pod \"85fa16be-f206-433c-b1b0-2e083a4ad58b\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.189873 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85fa16be-f206-433c-b1b0-2e083a4ad58b-scripts\") pod \"85fa16be-f206-433c-b1b0-2e083a4ad58b\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.189971 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") pod \"85fa16be-f206-433c-b1b0-2e083a4ad58b\" (UID: \"85fa16be-f206-433c-b1b0-2e083a4ad58b\") " Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.191059 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/85fa16be-f206-433c-b1b0-2e083a4ad58b-logs\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.191076 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b82ad4da-6bed-49c5-95bf-9bcedede60ca-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.191086 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b82ad4da-6bed-49c5-95bf-9bcedede60ca-logs\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.191095 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b82ad4da-6bed-49c5-95bf-9bcedede60ca-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.191104 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b82ad4da-6bed-49c5-95bf-9bcedede60ca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.191113 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7x25\" (UniqueName: \"kubernetes.io/projected/b82ad4da-6bed-49c5-95bf-9bcedede60ca-kube-api-access-l7x25\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.191134 4755 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") on node \"crc\" " Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.191143 4755 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b82ad4da-6bed-49c5-95bf-9bcedede60ca-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.191152 4755 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/85fa16be-f206-433c-b1b0-2e083a4ad58b-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.197507 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85fa16be-f206-433c-b1b0-2e083a4ad58b-kube-api-access-km8bj" (OuterVolumeSpecName: "kube-api-access-km8bj") pod "85fa16be-f206-433c-b1b0-2e083a4ad58b" (UID: "85fa16be-f206-433c-b1b0-2e083a4ad58b"). InnerVolumeSpecName "kube-api-access-km8bj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.202433 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85fa16be-f206-433c-b1b0-2e083a4ad58b-scripts" (OuterVolumeSpecName: "scripts") pod "85fa16be-f206-433c-b1b0-2e083a4ad58b" (UID: "85fa16be-f206-433c-b1b0-2e083a4ad58b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.218016 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854" (OuterVolumeSpecName: "glance") pod "85fa16be-f206-433c-b1b0-2e083a4ad58b" (UID: "85fa16be-f206-433c-b1b0-2e083a4ad58b"). InnerVolumeSpecName "pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.221276 4755 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.221417 4755 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484") on node "crc" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.222311 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85fa16be-f206-433c-b1b0-2e083a4ad58b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "85fa16be-f206-433c-b1b0-2e083a4ad58b" (UID: "85fa16be-f206-433c-b1b0-2e083a4ad58b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.241450 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85fa16be-f206-433c-b1b0-2e083a4ad58b-config-data" (OuterVolumeSpecName: "config-data") pod "85fa16be-f206-433c-b1b0-2e083a4ad58b" (UID: "85fa16be-f206-433c-b1b0-2e083a4ad58b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.293164 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85fa16be-f206-433c-b1b0-2e083a4ad58b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.293206 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-km8bj\" (UniqueName: \"kubernetes.io/projected/85fa16be-f206-433c-b1b0-2e083a4ad58b-kube-api-access-km8bj\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.293220 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85fa16be-f206-433c-b1b0-2e083a4ad58b-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.293231 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85fa16be-f206-433c-b1b0-2e083a4ad58b-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.293273 4755 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") on node \"crc\" " Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.293287 4755 reconciler_common.go:293] "Volume detached for volume \"pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.307217 4755 generic.go:334] "Generic (PLEG): container finished" podID="b82ad4da-6bed-49c5-95bf-9bcedede60ca" containerID="7788867d78cd92f1a1f50b86f7937c250c00a04b562ac335d0b86c268bf1dded" exitCode=0 Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.307257 4755 generic.go:334] "Generic (PLEG): container finished" podID="b82ad4da-6bed-49c5-95bf-9bcedede60ca" 
containerID="a276e04390c32fb71f5aa998fee416f5a9a2ac986495d5d82303f7b4716077e6" exitCode=143 Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.307405 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.307990 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b82ad4da-6bed-49c5-95bf-9bcedede60ca","Type":"ContainerDied","Data":"7788867d78cd92f1a1f50b86f7937c250c00a04b562ac335d0b86c268bf1dded"} Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.308047 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b82ad4da-6bed-49c5-95bf-9bcedede60ca","Type":"ContainerDied","Data":"a276e04390c32fb71f5aa998fee416f5a9a2ac986495d5d82303f7b4716077e6"} Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.308060 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b82ad4da-6bed-49c5-95bf-9bcedede60ca","Type":"ContainerDied","Data":"eb15b637cc2a4b19b3e9f6b410bff9c95d4ac5791a594546e529384d6677364f"} Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.308076 4755 scope.go:117] "RemoveContainer" containerID="7788867d78cd92f1a1f50b86f7937c250c00a04b562ac335d0b86c268bf1dded" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.311510 4755 generic.go:334] "Generic (PLEG): container finished" podID="85fa16be-f206-433c-b1b0-2e083a4ad58b" containerID="1014b9202d0708c8335e738ccd3097bb233ccab867b65bda13bea10261a39908" exitCode=0 Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.311535 4755 generic.go:334] "Generic (PLEG): container finished" podID="85fa16be-f206-433c-b1b0-2e083a4ad58b" containerID="65f27d997f8fe94cbf964bf4ae505777c055892b1517769229a01fca4c42d098" exitCode=143 Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.311552 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.311553 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"85fa16be-f206-433c-b1b0-2e083a4ad58b","Type":"ContainerDied","Data":"1014b9202d0708c8335e738ccd3097bb233ccab867b65bda13bea10261a39908"} Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.311699 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"85fa16be-f206-433c-b1b0-2e083a4ad58b","Type":"ContainerDied","Data":"65f27d997f8fe94cbf964bf4ae505777c055892b1517769229a01fca4c42d098"} Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.311716 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"85fa16be-f206-433c-b1b0-2e083a4ad58b","Type":"ContainerDied","Data":"aa20cc357fbd6eed8ba84e6a5a632f34ce2e89672a9c45e7b7df0d13119b7f96"} Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.339605 4755 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.339954 4755 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854") on node "crc" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.359094 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.370257 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.380970 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.395098 4755 reconciler_common.go:293] "Volume detached for volume \"pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.410757 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 22:53:36 crc kubenswrapper[4755]: E0202 22:53:36.411201 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85fa16be-f206-433c-b1b0-2e083a4ad58b" containerName="glance-httpd" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.411218 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="85fa16be-f206-433c-b1b0-2e083a4ad58b" containerName="glance-httpd" Feb 02 22:53:36 crc kubenswrapper[4755]: E0202 22:53:36.411232 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc2ddb51-960d-4838-b97f-1e635994cb0a" containerName="init" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.411238 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc2ddb51-960d-4838-b97f-1e635994cb0a" containerName="init" Feb 02 22:53:36 crc kubenswrapper[4755]: E0202 22:53:36.411259 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b82ad4da-6bed-49c5-95bf-9bcedede60ca" containerName="glance-httpd" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.411265 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b82ad4da-6bed-49c5-95bf-9bcedede60ca" containerName="glance-httpd" Feb 02 22:53:36 crc kubenswrapper[4755]: E0202 22:53:36.411278 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85fa16be-f206-433c-b1b0-2e083a4ad58b" containerName="glance-log" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.411283 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="85fa16be-f206-433c-b1b0-2e083a4ad58b" containerName="glance-log" Feb 02 22:53:36 crc kubenswrapper[4755]: E0202 22:53:36.411295 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3ecedb0-b612-46f5-9de6-d17ac9d404f3" containerName="init" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.411300 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3ecedb0-b612-46f5-9de6-d17ac9d404f3" containerName="init" Feb 02 22:53:36 crc kubenswrapper[4755]: E0202 22:53:36.411309 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b82ad4da-6bed-49c5-95bf-9bcedede60ca" containerName="glance-log" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.411315 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b82ad4da-6bed-49c5-95bf-9bcedede60ca" 
containerName="glance-log" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.411487 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="85fa16be-f206-433c-b1b0-2e083a4ad58b" containerName="glance-log" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.411503 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3ecedb0-b612-46f5-9de6-d17ac9d404f3" containerName="init" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.411517 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc2ddb51-960d-4838-b97f-1e635994cb0a" containerName="init" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.411540 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="b82ad4da-6bed-49c5-95bf-9bcedede60ca" containerName="glance-httpd" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.411551 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="b82ad4da-6bed-49c5-95bf-9bcedede60ca" containerName="glance-log" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.411564 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="85fa16be-f206-433c-b1b0-2e083a4ad58b" containerName="glance-httpd" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.412570 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.416751 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.416916 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-2l6wf" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.417047 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.435005 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.461975 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.496670 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.502435 4755 util.go:30] "No sandbox for pod can be found. 
Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.504004 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.513077 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.598997 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " pod="openstack/glance-default-external-api-0"
Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.599060 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trlf2\" (UniqueName: \"kubernetes.io/projected/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-kube-api-access-trlf2\") pod \"glance-default-internal-api-0\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " pod="openstack/glance-default-internal-api-0"
Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.599118 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-logs\") pod \"glance-default-external-api-0\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " pod="openstack/glance-default-external-api-0"
Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.599139 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-config-data\") pod \"glance-default-external-api-0\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " pod="openstack/glance-default-external-api-0"
Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.599155 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " pod="openstack/glance-default-internal-api-0"
Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.599212 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " pod="openstack/glance-default-external-api-0"
Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.599246 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-logs\") pod \"glance-default-internal-api-0\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " pod="openstack/glance-default-internal-api-0"
Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.599269 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\")
pod \"glance-default-internal-api-0\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.599444 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjxkr\" (UniqueName: \"kubernetes.io/projected/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-kube-api-access-bjxkr\") pod \"glance-default-external-api-0\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.599522 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") pod \"glance-default-external-api-0\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.599642 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.599675 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.599720 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-scripts\") pod \"glance-default-external-api-0\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.599809 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.704108 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.704168 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.704211 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-scripts\") pod \"glance-default-external-api-0\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.704239 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.704298 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.704324 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trlf2\" (UniqueName: \"kubernetes.io/projected/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-kube-api-access-trlf2\") pod \"glance-default-internal-api-0\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.704372 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-logs\") pod \"glance-default-external-api-0\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.704399 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-config-data\") pod \"glance-default-external-api-0\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.704415 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.704457 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.704680 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-logs\") pod \"glance-default-internal-api-0\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.704706 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") pod \"glance-default-internal-api-0\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.704780 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjxkr\" (UniqueName: \"kubernetes.io/projected/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-kube-api-access-bjxkr\") pod \"glance-default-external-api-0\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.704798 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") pod \"glance-default-external-api-0\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.705233 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.705497 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.705967 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-logs\") pod \"glance-default-external-api-0\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.706211 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-logs\") pod \"glance-default-internal-api-0\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.708859 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.708863 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-config-data\") pod \"glance-default-external-api-0\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.708978 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-scripts\") pod 
\"glance-default-external-api-0\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.709022 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.712093 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.713676 4755 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.713718 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") pod \"glance-default-internal-api-0\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/12ae2e05c1109de2cfa2799707fdeed95eed1c35304f00e5efa94d1e550db555/globalmount\"" pod="openstack/glance-default-internal-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.714462 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.719946 4755 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.719974 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") pod \"glance-default-external-api-0\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/36a0f8b995f8d37a8776a91ffef55e84b7ae73b259c9d13bbc3129ab0c1d828a/globalmount\"" pod="openstack/glance-default-external-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.723150 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjxkr\" (UniqueName: \"kubernetes.io/projected/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-kube-api-access-bjxkr\") pod \"glance-default-external-api-0\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.732257 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trlf2\" (UniqueName: \"kubernetes.io/projected/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-kube-api-access-trlf2\") pod \"glance-default-internal-api-0\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.793989 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") pod \"glance-default-internal-api-0\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.809679 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") pod \"glance-default-external-api-0\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " pod="openstack/glance-default-external-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.846196 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 22:53:36 crc kubenswrapper[4755]: I0202 22:53:36.856589 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0"
Feb 02 22:53:37 crc kubenswrapper[4755]: I0202 22:53:37.085000 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85fa16be-f206-433c-b1b0-2e083a4ad58b" path="/var/lib/kubelet/pods/85fa16be-f206-433c-b1b0-2e083a4ad58b/volumes"
Feb 02 22:53:37 crc kubenswrapper[4755]: I0202 22:53:37.086058 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b82ad4da-6bed-49c5-95bf-9bcedede60ca" path="/var/lib/kubelet/pods/b82ad4da-6bed-49c5-95bf-9bcedede60ca/volumes"
Feb 02 22:53:37 crc kubenswrapper[4755]: I0202 22:53:37.221667 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0"
Feb 02 22:53:37 crc kubenswrapper[4755]: I0202 22:53:37.332687 4755 generic.go:334] "Generic (PLEG): container finished" podID="3116a356-a337-47ea-89fc-6bdf583db10f" containerID="ad7b31f28ff90ca5b9e23cc67681ed4153a56ca002a9ead25fc3c089b7ff456f" exitCode=0
Feb 02 22:53:37 crc kubenswrapper[4755]: I0202 22:53:37.332872 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bjfzp" event={"ID":"3116a356-a337-47ea-89fc-6bdf583db10f","Type":"ContainerDied","Data":"ad7b31f28ff90ca5b9e23cc67681ed4153a56ca002a9ead25fc3c089b7ff456f"}
Feb 02 22:53:39 crc kubenswrapper[4755]: I0202 22:53:39.711173 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 02 22:53:39 crc kubenswrapper[4755]: I0202 22:53:39.771461 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Feb 02 22:53:39 crc kubenswrapper[4755]: I0202 22:53:39.815379 4755 scope.go:117] "RemoveContainer" containerID="a276e04390c32fb71f5aa998fee416f5a9a2ac986495d5d82303f7b4716077e6"
Feb 02 22:53:39 crc kubenswrapper[4755]: I0202 22:53:39.911911 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-bjfzp"
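
The "Cleaned up orphaned pod volumes dir" lines above are the final step of the old glance pods' teardown: once every volume of a deleted pod has been unmounted and detached, kubelet removes the pod's volumes directory under its pods root. The layout is visible in the logged paths; a small sketch of the path construction (path building only, no deletion):

package main

import (
	"fmt"
	"path/filepath"
)

// podVolumesDir rebuilds the directory kubelet reports cleaning up.
func podVolumesDir(kubeletRoot, podUID string) string {
	return filepath.Join(kubeletRoot, "pods", podUID, "volumes")
}

func main() {
	fmt.Println(podVolumesDir("/var/lib/kubelet", "85fa16be-f206-433c-b1b0-2e083a4ad58b"))
	// /var/lib/kubelet/pods/85fa16be-f206-433c-b1b0-2e083a4ad58b/volumes
}
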
Need to start a new one" pod="openstack/keystone-bootstrap-bjfzp" Feb 02 22:53:39 crc kubenswrapper[4755]: I0202 22:53:39.994499 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-scripts\") pod \"3116a356-a337-47ea-89fc-6bdf583db10f\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " Feb 02 22:53:39 crc kubenswrapper[4755]: I0202 22:53:39.994616 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-fernet-keys\") pod \"3116a356-a337-47ea-89fc-6bdf583db10f\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " Feb 02 22:53:39 crc kubenswrapper[4755]: I0202 22:53:39.994658 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-credential-keys\") pod \"3116a356-a337-47ea-89fc-6bdf583db10f\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " Feb 02 22:53:39 crc kubenswrapper[4755]: I0202 22:53:39.994681 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8f78\" (UniqueName: \"kubernetes.io/projected/3116a356-a337-47ea-89fc-6bdf583db10f-kube-api-access-c8f78\") pod \"3116a356-a337-47ea-89fc-6bdf583db10f\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " Feb 02 22:53:39 crc kubenswrapper[4755]: I0202 22:53:39.994707 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-combined-ca-bundle\") pod \"3116a356-a337-47ea-89fc-6bdf583db10f\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " Feb 02 22:53:39 crc kubenswrapper[4755]: I0202 22:53:39.994856 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-config-data\") pod \"3116a356-a337-47ea-89fc-6bdf583db10f\" (UID: \"3116a356-a337-47ea-89fc-6bdf583db10f\") " Feb 02 22:53:40 crc kubenswrapper[4755]: I0202 22:53:40.004714 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "3116a356-a337-47ea-89fc-6bdf583db10f" (UID: "3116a356-a337-47ea-89fc-6bdf583db10f"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:53:40 crc kubenswrapper[4755]: I0202 22:53:40.005305 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-scripts" (OuterVolumeSpecName: "scripts") pod "3116a356-a337-47ea-89fc-6bdf583db10f" (UID: "3116a356-a337-47ea-89fc-6bdf583db10f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:53:40 crc kubenswrapper[4755]: I0202 22:53:40.017120 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "3116a356-a337-47ea-89fc-6bdf583db10f" (UID: "3116a356-a337-47ea-89fc-6bdf583db10f"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:53:40 crc kubenswrapper[4755]: I0202 22:53:40.024363 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3116a356-a337-47ea-89fc-6bdf583db10f-kube-api-access-c8f78" (OuterVolumeSpecName: "kube-api-access-c8f78") pod "3116a356-a337-47ea-89fc-6bdf583db10f" (UID: "3116a356-a337-47ea-89fc-6bdf583db10f"). InnerVolumeSpecName "kube-api-access-c8f78". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:53:40 crc kubenswrapper[4755]: I0202 22:53:40.026987 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3116a356-a337-47ea-89fc-6bdf583db10f" (UID: "3116a356-a337-47ea-89fc-6bdf583db10f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:53:40 crc kubenswrapper[4755]: I0202 22:53:40.050941 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-config-data" (OuterVolumeSpecName: "config-data") pod "3116a356-a337-47ea-89fc-6bdf583db10f" (UID: "3116a356-a337-47ea-89fc-6bdf583db10f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:53:40 crc kubenswrapper[4755]: I0202 22:53:40.105992 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:40 crc kubenswrapper[4755]: I0202 22:53:40.106235 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:40 crc kubenswrapper[4755]: I0202 22:53:40.106243 4755 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:40 crc kubenswrapper[4755]: I0202 22:53:40.106251 4755 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-credential-keys\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:40 crc kubenswrapper[4755]: I0202 22:53:40.106263 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8f78\" (UniqueName: \"kubernetes.io/projected/3116a356-a337-47ea-89fc-6bdf583db10f-kube-api-access-c8f78\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:40 crc kubenswrapper[4755]: I0202 22:53:40.106272 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3116a356-a337-47ea-89fc-6bdf583db10f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:40 crc kubenswrapper[4755]: I0202 22:53:40.368099 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bjfzp" event={"ID":"3116a356-a337-47ea-89fc-6bdf583db10f","Type":"ContainerDied","Data":"e78b06eeab6351b5d2caf23f31e5e563938e396250852cd430074e5f3922fac1"} Feb 02 22:53:40 crc kubenswrapper[4755]: I0202 22:53:40.368136 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e78b06eeab6351b5d2caf23f31e5e563938e396250852cd430074e5f3922fac1" Feb 02 22:53:40 crc kubenswrapper[4755]: I0202 
22:53:40.368182 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-bjfzp" Feb 02 22:53:40 crc kubenswrapper[4755]: I0202 22:53:40.862106 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:53:40 crc kubenswrapper[4755]: I0202 22:53:40.926696 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-fzxs2"] Feb 02 22:53:40 crc kubenswrapper[4755]: I0202 22:53:40.926939 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2" podUID="6cf5da5b-b25b-4a10-a05c-a82300fe094b" containerName="dnsmasq-dns" containerID="cri-o://919f111e4046619309b3de180d30ff225ab8d08eb07810b870554a01e7753c0e" gracePeriod=10 Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.039817 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-bjfzp"] Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.048128 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-bjfzp"] Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.083858 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3116a356-a337-47ea-89fc-6bdf583db10f" path="/var/lib/kubelet/pods/3116a356-a337-47ea-89fc-6bdf583db10f/volumes" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.156119 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-55dvt"] Feb 02 22:53:41 crc kubenswrapper[4755]: E0202 22:53:41.156541 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3116a356-a337-47ea-89fc-6bdf583db10f" containerName="keystone-bootstrap" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.156556 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3116a356-a337-47ea-89fc-6bdf583db10f" containerName="keystone-bootstrap" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.156785 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="3116a356-a337-47ea-89fc-6bdf583db10f" containerName="keystone-bootstrap" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.157520 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-55dvt" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.160399 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-pvh95" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.160711 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.160970 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.167031 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.178130 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-55dvt"] Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.229501 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-fernet-keys\") pod \"keystone-bootstrap-55dvt\" (UID: \"4f55b2a0-624c-46b1-bede-8cb15264838e\") " pod="openstack/keystone-bootstrap-55dvt" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.229927 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-scripts\") pod \"keystone-bootstrap-55dvt\" (UID: \"4f55b2a0-624c-46b1-bede-8cb15264838e\") " pod="openstack/keystone-bootstrap-55dvt" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.229992 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vl7tp\" (UniqueName: \"kubernetes.io/projected/4f55b2a0-624c-46b1-bede-8cb15264838e-kube-api-access-vl7tp\") pod \"keystone-bootstrap-55dvt\" (UID: \"4f55b2a0-624c-46b1-bede-8cb15264838e\") " pod="openstack/keystone-bootstrap-55dvt" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.230048 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-config-data\") pod \"keystone-bootstrap-55dvt\" (UID: \"4f55b2a0-624c-46b1-bede-8cb15264838e\") " pod="openstack/keystone-bootstrap-55dvt" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.230116 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-credential-keys\") pod \"keystone-bootstrap-55dvt\" (UID: \"4f55b2a0-624c-46b1-bede-8cb15264838e\") " pod="openstack/keystone-bootstrap-55dvt" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.230170 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-combined-ca-bundle\") pod \"keystone-bootstrap-55dvt\" (UID: \"4f55b2a0-624c-46b1-bede-8cb15264838e\") " pod="openstack/keystone-bootstrap-55dvt" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.332579 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-config-data\") pod \"keystone-bootstrap-55dvt\" (UID: 
\"4f55b2a0-624c-46b1-bede-8cb15264838e\") " pod="openstack/keystone-bootstrap-55dvt" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.332670 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-credential-keys\") pod \"keystone-bootstrap-55dvt\" (UID: \"4f55b2a0-624c-46b1-bede-8cb15264838e\") " pod="openstack/keystone-bootstrap-55dvt" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.332751 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-combined-ca-bundle\") pod \"keystone-bootstrap-55dvt\" (UID: \"4f55b2a0-624c-46b1-bede-8cb15264838e\") " pod="openstack/keystone-bootstrap-55dvt" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.333084 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-fernet-keys\") pod \"keystone-bootstrap-55dvt\" (UID: \"4f55b2a0-624c-46b1-bede-8cb15264838e\") " pod="openstack/keystone-bootstrap-55dvt" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.333148 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-scripts\") pod \"keystone-bootstrap-55dvt\" (UID: \"4f55b2a0-624c-46b1-bede-8cb15264838e\") " pod="openstack/keystone-bootstrap-55dvt" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.333254 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vl7tp\" (UniqueName: \"kubernetes.io/projected/4f55b2a0-624c-46b1-bede-8cb15264838e-kube-api-access-vl7tp\") pod \"keystone-bootstrap-55dvt\" (UID: \"4f55b2a0-624c-46b1-bede-8cb15264838e\") " pod="openstack/keystone-bootstrap-55dvt" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.351782 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-credential-keys\") pod \"keystone-bootstrap-55dvt\" (UID: \"4f55b2a0-624c-46b1-bede-8cb15264838e\") " pod="openstack/keystone-bootstrap-55dvt" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.352339 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-fernet-keys\") pod \"keystone-bootstrap-55dvt\" (UID: \"4f55b2a0-624c-46b1-bede-8cb15264838e\") " pod="openstack/keystone-bootstrap-55dvt" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.353080 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-combined-ca-bundle\") pod \"keystone-bootstrap-55dvt\" (UID: \"4f55b2a0-624c-46b1-bede-8cb15264838e\") " pod="openstack/keystone-bootstrap-55dvt" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.353123 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-config-data\") pod \"keystone-bootstrap-55dvt\" (UID: \"4f55b2a0-624c-46b1-bede-8cb15264838e\") " pod="openstack/keystone-bootstrap-55dvt" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.359282 4755 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-scripts\") pod \"keystone-bootstrap-55dvt\" (UID: \"4f55b2a0-624c-46b1-bede-8cb15264838e\") " pod="openstack/keystone-bootstrap-55dvt" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.365782 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vl7tp\" (UniqueName: \"kubernetes.io/projected/4f55b2a0-624c-46b1-bede-8cb15264838e-kube-api-access-vl7tp\") pod \"keystone-bootstrap-55dvt\" (UID: \"4f55b2a0-624c-46b1-bede-8cb15264838e\") " pod="openstack/keystone-bootstrap-55dvt" Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.383887 4755 generic.go:334] "Generic (PLEG): container finished" podID="6cf5da5b-b25b-4a10-a05c-a82300fe094b" containerID="919f111e4046619309b3de180d30ff225ab8d08eb07810b870554a01e7753c0e" exitCode=0 Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.383970 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2" event={"ID":"6cf5da5b-b25b-4a10-a05c-a82300fe094b","Type":"ContainerDied","Data":"919f111e4046619309b3de180d30ff225ab8d08eb07810b870554a01e7753c0e"} Feb 02 22:53:41 crc kubenswrapper[4755]: I0202 22:53:41.484398 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-55dvt" Feb 02 22:53:42 crc kubenswrapper[4755]: I0202 22:53:42.306611 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2" podUID="6cf5da5b-b25b-4a10-a05c-a82300fe094b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.131:5353: connect: connection refused" Feb 02 22:53:47 crc kubenswrapper[4755]: I0202 22:53:47.221853 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:47 crc kubenswrapper[4755]: I0202 22:53:47.230124 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:47 crc kubenswrapper[4755]: I0202 22:53:47.308562 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2" podUID="6cf5da5b-b25b-4a10-a05c-a82300fe094b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.131:5353: connect: connection refused" Feb 02 22:53:47 crc kubenswrapper[4755]: I0202 22:53:47.446118 4755 generic.go:334] "Generic (PLEG): container finished" podID="45b91834-5ada-4402-85c4-df681a85c076" containerID="1720f86c20eca7c8b729f20ca7a07c4f32522ff9478534f117fa8228cc7d58a1" exitCode=0 Feb 02 22:53:47 crc kubenswrapper[4755]: I0202 22:53:47.446174 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-2vwzg" event={"ID":"45b91834-5ada-4402-85c4-df681a85c076","Type":"ContainerDied","Data":"1720f86c20eca7c8b729f20ca7a07c4f32522ff9478534f117fa8228cc7d58a1"} Feb 02 22:53:47 crc kubenswrapper[4755]: I0202 22:53:47.451821 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Feb 02 22:53:53 crc kubenswrapper[4755]: I0202 22:53:53.389799 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 22:53:53 crc 
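The entries above record repeated readiness failures against the dnsmasq pod being retired, plus the liveness failure that the entries below act on: the kubelet marks machine-config-daemon unhealthy and kills it with a 600-second grace period so it can be restarted. A small sketch to tally "Probe failed" lines per pod and probe type, assuming the prober.go:107 field layout seen in this log:

#!/usr/bin/env python3
# Sketch: count "Probe failed" entries per (pod, probe type) from a
# kubelet log on stdin. Field layout assumed from this file's lines.
import re
import sys
from collections import Counter

PROBE = re.compile(
    r'"Probe failed" probeType="(?P<type>\w+)" pod="(?P<pod>[^"]+)"'
)

failures = Counter()
for line in sys.stdin:
    m = PROBE.search(line)
    if m:
        failures[(m.group("pod"), m.group("type"))] += 1

for (pod, ptype), n in failures.most_common():
    print(f"{n:4d}  {ptype:<10} {pod}")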
Feb 02 22:53:53 crc kubenswrapper[4755]: I0202 22:53:53.390271 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc"
Feb 02 22:53:53 crc kubenswrapper[4755]: I0202 22:53:53.390967 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b7878a61f8677fe4ed7b8526051e4c43447e019572d069fa0c208b41ce260865"} pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 02 22:53:53 crc kubenswrapper[4755]: I0202 22:53:53.391025 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" containerID="cri-o://b7878a61f8677fe4ed7b8526051e4c43447e019572d069fa0c208b41ce260865" gracePeriod=600
Feb 02 22:53:54 crc kubenswrapper[4755]: I0202 22:53:54.526196 4755 generic.go:334] "Generic (PLEG): container finished" podID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerID="b7878a61f8677fe4ed7b8526051e4c43447e019572d069fa0c208b41ce260865" exitCode=0
Feb 02 22:53:54 crc kubenswrapper[4755]: I0202 22:53:54.526250 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerDied","Data":"b7878a61f8677fe4ed7b8526051e4c43447e019572d069fa0c208b41ce260865"}
Feb 02 22:53:55 crc kubenswrapper[4755]: E0202 22:53:55.602984 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified"
Feb 02 22:53:55 crc kubenswrapper[4755]: E0202 22:53:55.603173 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nkdd7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-xwr7s_openstack(e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 22:53:55 crc kubenswrapper[4755]: E0202 22:53:55.604285 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-xwr7s" podUID="e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda" Feb 02 22:53:55 crc kubenswrapper[4755]: I0202 22:53:55.605506 4755 scope.go:117] "RemoveContainer" containerID="7788867d78cd92f1a1f50b86f7937c250c00a04b562ac335d0b86c268bf1dded" Feb 02 22:53:55 crc kubenswrapper[4755]: E0202 22:53:55.606547 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7788867d78cd92f1a1f50b86f7937c250c00a04b562ac335d0b86c268bf1dded\": container with ID starting with 7788867d78cd92f1a1f50b86f7937c250c00a04b562ac335d0b86c268bf1dded not found: ID does not exist" containerID="7788867d78cd92f1a1f50b86f7937c250c00a04b562ac335d0b86c268bf1dded" Feb 02 22:53:55 crc kubenswrapper[4755]: I0202 22:53:55.606616 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7788867d78cd92f1a1f50b86f7937c250c00a04b562ac335d0b86c268bf1dded"} err="failed to get container status \"7788867d78cd92f1a1f50b86f7937c250c00a04b562ac335d0b86c268bf1dded\": rpc error: code = NotFound desc = could not find container \"7788867d78cd92f1a1f50b86f7937c250c00a04b562ac335d0b86c268bf1dded\": container with ID starting with 7788867d78cd92f1a1f50b86f7937c250c00a04b562ac335d0b86c268bf1dded not found: ID does not exist" Feb 02 22:53:55 crc kubenswrapper[4755]: I0202 22:53:55.606676 4755 scope.go:117] "RemoveContainer" containerID="a276e04390c32fb71f5aa998fee416f5a9a2ac986495d5d82303f7b4716077e6" Feb 02 22:53:55 crc kubenswrapper[4755]: E0202 22:53:55.607310 4755 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a276e04390c32fb71f5aa998fee416f5a9a2ac986495d5d82303f7b4716077e6\": container with ID starting with a276e04390c32fb71f5aa998fee416f5a9a2ac986495d5d82303f7b4716077e6 not found: ID does not exist" containerID="a276e04390c32fb71f5aa998fee416f5a9a2ac986495d5d82303f7b4716077e6" Feb 02 22:53:55 crc kubenswrapper[4755]: I0202 22:53:55.607378 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a276e04390c32fb71f5aa998fee416f5a9a2ac986495d5d82303f7b4716077e6"} err="failed to get container status \"a276e04390c32fb71f5aa998fee416f5a9a2ac986495d5d82303f7b4716077e6\": rpc error: code = NotFound desc = could not find container \"a276e04390c32fb71f5aa998fee416f5a9a2ac986495d5d82303f7b4716077e6\": container with ID starting with a276e04390c32fb71f5aa998fee416f5a9a2ac986495d5d82303f7b4716077e6 not found: ID does not exist" Feb 02 22:53:55 crc kubenswrapper[4755]: I0202 22:53:55.607417 4755 scope.go:117] "RemoveContainer" containerID="7788867d78cd92f1a1f50b86f7937c250c00a04b562ac335d0b86c268bf1dded" Feb 02 22:53:55 crc kubenswrapper[4755]: I0202 22:53:55.608525 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7788867d78cd92f1a1f50b86f7937c250c00a04b562ac335d0b86c268bf1dded"} err="failed to get container status \"7788867d78cd92f1a1f50b86f7937c250c00a04b562ac335d0b86c268bf1dded\": rpc error: code = NotFound desc = could not find container \"7788867d78cd92f1a1f50b86f7937c250c00a04b562ac335d0b86c268bf1dded\": container with ID starting with 7788867d78cd92f1a1f50b86f7937c250c00a04b562ac335d0b86c268bf1dded not found: ID does not exist" Feb 02 22:53:55 crc kubenswrapper[4755]: I0202 22:53:55.608567 4755 scope.go:117] "RemoveContainer" containerID="a276e04390c32fb71f5aa998fee416f5a9a2ac986495d5d82303f7b4716077e6" Feb 02 22:53:55 crc kubenswrapper[4755]: I0202 22:53:55.609047 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a276e04390c32fb71f5aa998fee416f5a9a2ac986495d5d82303f7b4716077e6"} err="failed to get container status \"a276e04390c32fb71f5aa998fee416f5a9a2ac986495d5d82303f7b4716077e6\": rpc error: code = NotFound desc = could not find container \"a276e04390c32fb71f5aa998fee416f5a9a2ac986495d5d82303f7b4716077e6\": container with ID starting with a276e04390c32fb71f5aa998fee416f5a9a2ac986495d5d82303f7b4716077e6 not found: ID does not exist" Feb 02 22:53:55 crc kubenswrapper[4755]: I0202 22:53:55.609078 4755 scope.go:117] "RemoveContainer" containerID="1014b9202d0708c8335e738ccd3097bb233ccab867b65bda13bea10261a39908" Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.045338 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2" Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.050266 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-2vwzg" Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.167988 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-ovsdbserver-sb\") pod \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\" (UID: \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\") " Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.168140 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-config\") pod \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\" (UID: \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\") " Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.168284 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45b91834-5ada-4402-85c4-df681a85c076-combined-ca-bundle\") pod \"45b91834-5ada-4402-85c4-df681a85c076\" (UID: \"45b91834-5ada-4402-85c4-df681a85c076\") " Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.168540 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/45b91834-5ada-4402-85c4-df681a85c076-config\") pod \"45b91834-5ada-4402-85c4-df681a85c076\" (UID: \"45b91834-5ada-4402-85c4-df681a85c076\") " Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.168609 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-dns-svc\") pod \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\" (UID: \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\") " Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.168703 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vlq8k\" (UniqueName: \"kubernetes.io/projected/45b91834-5ada-4402-85c4-df681a85c076-kube-api-access-vlq8k\") pod \"45b91834-5ada-4402-85c4-df681a85c076\" (UID: \"45b91834-5ada-4402-85c4-df681a85c076\") " Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.168770 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vswfw\" (UniqueName: \"kubernetes.io/projected/6cf5da5b-b25b-4a10-a05c-a82300fe094b-kube-api-access-vswfw\") pod \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\" (UID: \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\") " Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.168807 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-ovsdbserver-nb\") pod \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\" (UID: \"6cf5da5b-b25b-4a10-a05c-a82300fe094b\") " Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.172656 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45b91834-5ada-4402-85c4-df681a85c076-kube-api-access-vlq8k" (OuterVolumeSpecName: "kube-api-access-vlq8k") pod "45b91834-5ada-4402-85c4-df681a85c076" (UID: "45b91834-5ada-4402-85c4-df681a85c076"). InnerVolumeSpecName "kube-api-access-vlq8k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.174579 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cf5da5b-b25b-4a10-a05c-a82300fe094b-kube-api-access-vswfw" (OuterVolumeSpecName: "kube-api-access-vswfw") pod "6cf5da5b-b25b-4a10-a05c-a82300fe094b" (UID: "6cf5da5b-b25b-4a10-a05c-a82300fe094b"). InnerVolumeSpecName "kube-api-access-vswfw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.200560 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45b91834-5ada-4402-85c4-df681a85c076-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "45b91834-5ada-4402-85c4-df681a85c076" (UID: "45b91834-5ada-4402-85c4-df681a85c076"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.209990 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6cf5da5b-b25b-4a10-a05c-a82300fe094b" (UID: "6cf5da5b-b25b-4a10-a05c-a82300fe094b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.217281 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45b91834-5ada-4402-85c4-df681a85c076-config" (OuterVolumeSpecName: "config") pod "45b91834-5ada-4402-85c4-df681a85c076" (UID: "45b91834-5ada-4402-85c4-df681a85c076"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.223924 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-config" (OuterVolumeSpecName: "config") pod "6cf5da5b-b25b-4a10-a05c-a82300fe094b" (UID: "6cf5da5b-b25b-4a10-a05c-a82300fe094b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.230488 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6cf5da5b-b25b-4a10-a05c-a82300fe094b" (UID: "6cf5da5b-b25b-4a10-a05c-a82300fe094b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.237071 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6cf5da5b-b25b-4a10-a05c-a82300fe094b" (UID: "6cf5da5b-b25b-4a10-a05c-a82300fe094b"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.272480 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45b91834-5ada-4402-85c4-df681a85c076-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.272516 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/45b91834-5ada-4402-85c4-df681a85c076-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.272526 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.272537 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vlq8k\" (UniqueName: \"kubernetes.io/projected/45b91834-5ada-4402-85c4-df681a85c076-kube-api-access-vlq8k\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.272551 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vswfw\" (UniqueName: \"kubernetes.io/projected/6cf5da5b-b25b-4a10-a05c-a82300fe094b-kube-api-access-vswfw\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.272559 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.272567 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.272578 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cf5da5b-b25b-4a10-a05c-a82300fe094b-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.556866 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2" event={"ID":"6cf5da5b-b25b-4a10-a05c-a82300fe094b","Type":"ContainerDied","Data":"06a010e0929c73dd3c961de1e06f37a94fdcc73eef730202f595f38165615e78"} Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.557019 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2" Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.562625 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-2vwzg" event={"ID":"45b91834-5ada-4402-85c4-df681a85c076","Type":"ContainerDied","Data":"2b3394469bac09fd3c6d22a61dc42ee2b532d97569cd3dbb925365da74ec9a5b"} Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.562665 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2b3394469bac09fd3c6d22a61dc42ee2b532d97569cd3dbb925365da74ec9a5b" Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.562763 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-2vwzg" Feb 02 22:53:56 crc kubenswrapper[4755]: E0202 22:53:56.569756 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-xwr7s" podUID="e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda" Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.622062 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-fzxs2"] Feb 02 22:53:56 crc kubenswrapper[4755]: I0202 22:53:56.629336 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-fzxs2"] Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.088030 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6cf5da5b-b25b-4a10-a05c-a82300fe094b" path="/var/lib/kubelet/pods/6cf5da5b-b25b-4a10-a05c-a82300fe094b/volumes" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.306212 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-fzxs2" podUID="6cf5da5b-b25b-4a10-a05c-a82300fe094b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.131:5353: i/o timeout" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.401121 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-jt9ln"] Feb 02 22:53:57 crc kubenswrapper[4755]: E0202 22:53:57.401927 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45b91834-5ada-4402-85c4-df681a85c076" containerName="neutron-db-sync" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.401953 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="45b91834-5ada-4402-85c4-df681a85c076" containerName="neutron-db-sync" Feb 02 22:53:57 crc kubenswrapper[4755]: E0202 22:53:57.401976 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cf5da5b-b25b-4a10-a05c-a82300fe094b" containerName="init" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.401982 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cf5da5b-b25b-4a10-a05c-a82300fe094b" containerName="init" Feb 02 22:53:57 crc kubenswrapper[4755]: E0202 22:53:57.402005 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cf5da5b-b25b-4a10-a05c-a82300fe094b" containerName="dnsmasq-dns" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.402012 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cf5da5b-b25b-4a10-a05c-a82300fe094b" containerName="dnsmasq-dns" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.402172 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cf5da5b-b25b-4a10-a05c-a82300fe094b" containerName="dnsmasq-dns" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.402197 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="45b91834-5ada-4402-85c4-df681a85c076" containerName="neutron-db-sync" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.422301 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-jt9ln"] Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.422399 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.500963 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x227b\" (UniqueName: \"kubernetes.io/projected/73a43f85-781b-4f83-b18d-0d69e6d272e0-kube-api-access-x227b\") pod \"dnsmasq-dns-6b7b667979-jt9ln\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.501064 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-jt9ln\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.501088 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-jt9ln\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.501152 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-jt9ln\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.501193 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-dns-svc\") pod \"dnsmasq-dns-6b7b667979-jt9ln\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.501215 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-config\") pod \"dnsmasq-dns-6b7b667979-jt9ln\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.501644 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-85c9cbd9b8-2tlmh"] Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.504113 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-85c9cbd9b8-2tlmh" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.506155 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-5g72q" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.506702 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.507819 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.508081 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.522664 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-85c9cbd9b8-2tlmh"] Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.602269 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-config\") pod \"dnsmasq-dns-6b7b667979-jt9ln\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.602327 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x227b\" (UniqueName: \"kubernetes.io/projected/73a43f85-781b-4f83-b18d-0d69e6d272e0-kube-api-access-x227b\") pod \"dnsmasq-dns-6b7b667979-jt9ln\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.602376 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-combined-ca-bundle\") pod \"neutron-85c9cbd9b8-2tlmh\" (UID: \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\") " pod="openstack/neutron-85c9cbd9b8-2tlmh" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.602421 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-httpd-config\") pod \"neutron-85c9cbd9b8-2tlmh\" (UID: \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\") " pod="openstack/neutron-85c9cbd9b8-2tlmh" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.602452 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-jt9ln\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.602473 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnsn9\" (UniqueName: \"kubernetes.io/projected/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-kube-api-access-pnsn9\") pod \"neutron-85c9cbd9b8-2tlmh\" (UID: \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\") " pod="openstack/neutron-85c9cbd9b8-2tlmh" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.602492 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-ovsdbserver-sb\") pod 
\"dnsmasq-dns-6b7b667979-jt9ln\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.602543 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-ovndb-tls-certs\") pod \"neutron-85c9cbd9b8-2tlmh\" (UID: \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\") " pod="openstack/neutron-85c9cbd9b8-2tlmh" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.602560 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-config\") pod \"neutron-85c9cbd9b8-2tlmh\" (UID: \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\") " pod="openstack/neutron-85c9cbd9b8-2tlmh" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.602715 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-jt9ln\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.602854 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-dns-svc\") pod \"dnsmasq-dns-6b7b667979-jt9ln\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.603231 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-jt9ln\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.603416 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-config\") pod \"dnsmasq-dns-6b7b667979-jt9ln\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.603471 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-jt9ln\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.603847 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-dns-svc\") pod \"dnsmasq-dns-6b7b667979-jt9ln\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.604164 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-jt9ln\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " 
pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.624249 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x227b\" (UniqueName: \"kubernetes.io/projected/73a43f85-781b-4f83-b18d-0d69e6d272e0-kube-api-access-x227b\") pod \"dnsmasq-dns-6b7b667979-jt9ln\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.704497 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnsn9\" (UniqueName: \"kubernetes.io/projected/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-kube-api-access-pnsn9\") pod \"neutron-85c9cbd9b8-2tlmh\" (UID: \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\") " pod="openstack/neutron-85c9cbd9b8-2tlmh" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.704618 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-ovndb-tls-certs\") pod \"neutron-85c9cbd9b8-2tlmh\" (UID: \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\") " pod="openstack/neutron-85c9cbd9b8-2tlmh" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.704645 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-config\") pod \"neutron-85c9cbd9b8-2tlmh\" (UID: \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\") " pod="openstack/neutron-85c9cbd9b8-2tlmh" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.704791 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-combined-ca-bundle\") pod \"neutron-85c9cbd9b8-2tlmh\" (UID: \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\") " pod="openstack/neutron-85c9cbd9b8-2tlmh" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.704848 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-httpd-config\") pod \"neutron-85c9cbd9b8-2tlmh\" (UID: \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\") " pod="openstack/neutron-85c9cbd9b8-2tlmh" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.709849 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-httpd-config\") pod \"neutron-85c9cbd9b8-2tlmh\" (UID: \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\") " pod="openstack/neutron-85c9cbd9b8-2tlmh" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.710028 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-combined-ca-bundle\") pod \"neutron-85c9cbd9b8-2tlmh\" (UID: \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\") " pod="openstack/neutron-85c9cbd9b8-2tlmh" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.710907 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-ovndb-tls-certs\") pod \"neutron-85c9cbd9b8-2tlmh\" (UID: \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\") " pod="openstack/neutron-85c9cbd9b8-2tlmh" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.711239 4755 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-config\") pod \"neutron-85c9cbd9b8-2tlmh\" (UID: \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\") " pod="openstack/neutron-85c9cbd9b8-2tlmh" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.721694 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnsn9\" (UniqueName: \"kubernetes.io/projected/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-kube-api-access-pnsn9\") pod \"neutron-85c9cbd9b8-2tlmh\" (UID: \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\") " pod="openstack/neutron-85c9cbd9b8-2tlmh" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.746151 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" Feb 02 22:53:57 crc kubenswrapper[4755]: I0202 22:53:57.817489 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-85c9cbd9b8-2tlmh" Feb 02 22:53:59 crc kubenswrapper[4755]: I0202 22:53:59.834802 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-96d6dcbbf-js6bv"] Feb 02 22:53:59 crc kubenswrapper[4755]: I0202 22:53:59.837111 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-96d6dcbbf-js6bv" Feb 02 22:53:59 crc kubenswrapper[4755]: I0202 22:53:59.839775 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Feb 02 22:53:59 crc kubenswrapper[4755]: I0202 22:53:59.840031 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Feb 02 22:53:59 crc kubenswrapper[4755]: I0202 22:53:59.859506 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-96d6dcbbf-js6bv"] Feb 02 22:53:59 crc kubenswrapper[4755]: I0202 22:53:59.969044 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-httpd-config\") pod \"neutron-96d6dcbbf-js6bv\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") " pod="openstack/neutron-96d6dcbbf-js6bv" Feb 02 22:53:59 crc kubenswrapper[4755]: I0202 22:53:59.969094 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-config\") pod \"neutron-96d6dcbbf-js6bv\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") " pod="openstack/neutron-96d6dcbbf-js6bv" Feb 02 22:53:59 crc kubenswrapper[4755]: I0202 22:53:59.969142 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-combined-ca-bundle\") pod \"neutron-96d6dcbbf-js6bv\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") " pod="openstack/neutron-96d6dcbbf-js6bv" Feb 02 22:53:59 crc kubenswrapper[4755]: I0202 22:53:59.969161 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-ovndb-tls-certs\") pod \"neutron-96d6dcbbf-js6bv\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") " pod="openstack/neutron-96d6dcbbf-js6bv" Feb 02 22:53:59 crc kubenswrapper[4755]: I0202 22:53:59.969179 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-public-tls-certs\") pod \"neutron-96d6dcbbf-js6bv\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") " pod="openstack/neutron-96d6dcbbf-js6bv" Feb 02 22:53:59 crc kubenswrapper[4755]: I0202 22:53:59.969206 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jtz9\" (UniqueName: \"kubernetes.io/projected/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-kube-api-access-7jtz9\") pod \"neutron-96d6dcbbf-js6bv\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") " pod="openstack/neutron-96d6dcbbf-js6bv" Feb 02 22:53:59 crc kubenswrapper[4755]: I0202 22:53:59.969223 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-internal-tls-certs\") pod \"neutron-96d6dcbbf-js6bv\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") " pod="openstack/neutron-96d6dcbbf-js6bv" Feb 02 22:54:00 crc kubenswrapper[4755]: I0202 22:54:00.070770 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-httpd-config\") pod \"neutron-96d6dcbbf-js6bv\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") " pod="openstack/neutron-96d6dcbbf-js6bv" Feb 02 22:54:00 crc kubenswrapper[4755]: I0202 22:54:00.070843 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-config\") pod \"neutron-96d6dcbbf-js6bv\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") " pod="openstack/neutron-96d6dcbbf-js6bv" Feb 02 22:54:00 crc kubenswrapper[4755]: I0202 22:54:00.070920 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-combined-ca-bundle\") pod \"neutron-96d6dcbbf-js6bv\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") " pod="openstack/neutron-96d6dcbbf-js6bv" Feb 02 22:54:00 crc kubenswrapper[4755]: I0202 22:54:00.070937 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-ovndb-tls-certs\") pod \"neutron-96d6dcbbf-js6bv\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") " pod="openstack/neutron-96d6dcbbf-js6bv" Feb 02 22:54:00 crc kubenswrapper[4755]: I0202 22:54:00.070963 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-public-tls-certs\") pod \"neutron-96d6dcbbf-js6bv\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") " pod="openstack/neutron-96d6dcbbf-js6bv" Feb 02 22:54:00 crc kubenswrapper[4755]: I0202 22:54:00.071043 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jtz9\" (UniqueName: \"kubernetes.io/projected/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-kube-api-access-7jtz9\") pod \"neutron-96d6dcbbf-js6bv\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") " pod="openstack/neutron-96d6dcbbf-js6bv" Feb 02 22:54:00 crc kubenswrapper[4755]: I0202 22:54:00.071960 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-internal-tls-certs\") pod \"neutron-96d6dcbbf-js6bv\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") " pod="openstack/neutron-96d6dcbbf-js6bv" Feb 02 22:54:00 crc kubenswrapper[4755]: I0202 22:54:00.076350 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-combined-ca-bundle\") pod \"neutron-96d6dcbbf-js6bv\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") " pod="openstack/neutron-96d6dcbbf-js6bv" Feb 02 22:54:00 crc kubenswrapper[4755]: I0202 22:54:00.077365 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-config\") pod \"neutron-96d6dcbbf-js6bv\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") " pod="openstack/neutron-96d6dcbbf-js6bv" Feb 02 22:54:00 crc kubenswrapper[4755]: I0202 22:54:00.078320 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-ovndb-tls-certs\") pod \"neutron-96d6dcbbf-js6bv\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") " pod="openstack/neutron-96d6dcbbf-js6bv" Feb 02 22:54:00 crc kubenswrapper[4755]: I0202 22:54:00.080255 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-internal-tls-certs\") pod \"neutron-96d6dcbbf-js6bv\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") " pod="openstack/neutron-96d6dcbbf-js6bv" Feb 02 22:54:00 crc kubenswrapper[4755]: I0202 22:54:00.080607 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-httpd-config\") pod \"neutron-96d6dcbbf-js6bv\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") " pod="openstack/neutron-96d6dcbbf-js6bv" Feb 02 22:54:00 crc kubenswrapper[4755]: I0202 22:54:00.088150 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-public-tls-certs\") pod \"neutron-96d6dcbbf-js6bv\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") " pod="openstack/neutron-96d6dcbbf-js6bv" Feb 02 22:54:00 crc kubenswrapper[4755]: I0202 22:54:00.091636 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jtz9\" (UniqueName: \"kubernetes.io/projected/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-kube-api-access-7jtz9\") pod \"neutron-96d6dcbbf-js6bv\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") " pod="openstack/neutron-96d6dcbbf-js6bv" Feb 02 22:54:00 crc kubenswrapper[4755]: I0202 22:54:00.214393 4755 util.go:30] "No sandbox for pod can be found. 
Feb 02 22:54:00 crc kubenswrapper[4755]: E0202 22:54:00.254214 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified"
Feb 02 22:54:00 crc kubenswrapper[4755]: E0202 22:54:00.254486 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-n67bd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-pwclg_openstack(0403cc43-6199-4e95-b427-c4f268d8049a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Feb 02 22:54:00 crc kubenswrapper[4755]: E0202 22:54:00.255596 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-pwclg" podUID="0403cc43-6199-4e95-b427-c4f268d8049a"
Feb 02 22:54:00 crc kubenswrapper[4755]: E0202 22:54:00.618884 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-pwclg" podUID="0403cc43-6199-4e95-b427-c4f268d8049a"
Feb 02 22:54:00 crc kubenswrapper[4755]: I0202 22:54:00.762689 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Feb 02 22:54:01 crc kubenswrapper[4755]: I0202 22:54:01.327276 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 02 22:54:02 crc kubenswrapper[4755]: I0202 22:54:02.830756 4755 scope.go:117] "RemoveContainer" containerID="65f27d997f8fe94cbf964bf4ae505777c055892b1517769229a01fca4c42d098"
Feb 02 22:54:03 crc kubenswrapper[4755]: I0202 22:54:03.277654 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-55dvt"]
Feb 02 22:54:03 crc kubenswrapper[4755]: I0202 22:54:03.648653 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2","Type":"ContainerStarted","Data":"154d4c16a0cd44e9bab6736091068087bc68df248600090318d97853f809adb4"}
Feb 02 22:54:03 crc kubenswrapper[4755]: I0202 22:54:03.652076 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c3d93484-a8c3-4a2f-b2e0-22c007d77b11","Type":"ContainerStarted","Data":"003d113fd6d22a83ac0c685ef19630937f85c1385f65b4169e6f83d37cf442dc"}
Feb 02 22:54:05 crc kubenswrapper[4755]: I0202 22:54:05.689799 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-55dvt" event={"ID":"4f55b2a0-624c-46b1-bede-8cb15264838e","Type":"ContainerStarted","Data":"d5686bfa084c6c2a2acf0d418ab74db1409f2741c0aaadf4317165875a3bb639"}
Feb 02 22:54:06 crc kubenswrapper[4755]: I0202 22:54:06.924474 4755 scope.go:117] "RemoveContainer" containerID="1014b9202d0708c8335e738ccd3097bb233ccab867b65bda13bea10261a39908"
Feb 02 22:54:06 crc kubenswrapper[4755]: E0202 22:54:06.926891 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1014b9202d0708c8335e738ccd3097bb233ccab867b65bda13bea10261a39908\": container with ID starting with 1014b9202d0708c8335e738ccd3097bb233ccab867b65bda13bea10261a39908 not found: ID does not exist" containerID="1014b9202d0708c8335e738ccd3097bb233ccab867b65bda13bea10261a39908"
Feb 02 22:54:06 crc kubenswrapper[4755]: I0202 22:54:06.926967 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1014b9202d0708c8335e738ccd3097bb233ccab867b65bda13bea10261a39908"} err="failed to get container status \"1014b9202d0708c8335e738ccd3097bb233ccab867b65bda13bea10261a39908\": rpc error: code = NotFound desc = could not find container \"1014b9202d0708c8335e738ccd3097bb233ccab867b65bda13bea10261a39908\": container with ID starting with 1014b9202d0708c8335e738ccd3097bb233ccab867b65bda13bea10261a39908 not found: ID does not exist"
Feb 02 22:54:06 crc kubenswrapper[4755]: I0202 22:54:06.927015 4755 scope.go:117] "RemoveContainer" containerID="65f27d997f8fe94cbf964bf4ae505777c055892b1517769229a01fca4c42d098"
Feb 02 22:54:06 crc kubenswrapper[4755]: E0202 22:54:06.927691 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65f27d997f8fe94cbf964bf4ae505777c055892b1517769229a01fca4c42d098\": container with ID starting with 65f27d997f8fe94cbf964bf4ae505777c055892b1517769229a01fca4c42d098 not found: ID does not exist" containerID="65f27d997f8fe94cbf964bf4ae505777c055892b1517769229a01fca4c42d098"
Feb 02 22:54:06 crc kubenswrapper[4755]: I0202 22:54:06.928298 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65f27d997f8fe94cbf964bf4ae505777c055892b1517769229a01fca4c42d098"} err="failed to get container status \"65f27d997f8fe94cbf964bf4ae505777c055892b1517769229a01fca4c42d098\": rpc error: code = NotFound desc = could not find container \"65f27d997f8fe94cbf964bf4ae505777c055892b1517769229a01fca4c42d098\": container with ID starting with 65f27d997f8fe94cbf964bf4ae505777c055892b1517769229a01fca4c42d098 not found: ID does not exist"
Feb 02 22:54:06 crc kubenswrapper[4755]: I0202 22:54:06.928349 4755 scope.go:117] "RemoveContainer" containerID="1014b9202d0708c8335e738ccd3097bb233ccab867b65bda13bea10261a39908"
Feb 02 22:54:06 crc kubenswrapper[4755]: I0202 22:54:06.932308 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1014b9202d0708c8335e738ccd3097bb233ccab867b65bda13bea10261a39908"} err="failed to get container status \"1014b9202d0708c8335e738ccd3097bb233ccab867b65bda13bea10261a39908\": rpc error: code = NotFound desc = could not find container \"1014b9202d0708c8335e738ccd3097bb233ccab867b65bda13bea10261a39908\": container with ID starting with 1014b9202d0708c8335e738ccd3097bb233ccab867b65bda13bea10261a39908 not found: ID does not exist"
Feb 02 22:54:06 crc kubenswrapper[4755]: I0202 22:54:06.932366 4755 scope.go:117] "RemoveContainer" containerID="65f27d997f8fe94cbf964bf4ae505777c055892b1517769229a01fca4c42d098"
Feb 02 22:54:06 crc kubenswrapper[4755]: I0202 22:54:06.939896 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65f27d997f8fe94cbf964bf4ae505777c055892b1517769229a01fca4c42d098"} err="failed to get container status \"65f27d997f8fe94cbf964bf4ae505777c055892b1517769229a01fca4c42d098\": rpc error: code = NotFound desc = could not find container \"65f27d997f8fe94cbf964bf4ae505777c055892b1517769229a01fca4c42d098\": container with ID starting with 65f27d997f8fe94cbf964bf4ae505777c055892b1517769229a01fca4c42d098 not found: ID does not exist"
Feb 02 22:54:06 crc kubenswrapper[4755]: I0202 22:54:06.939965 4755 scope.go:117] "RemoveContainer" containerID="b3e3969df739edd98047f8857204b723c7cae6ce3d65529d90b43e5d926f70bf"
Feb 02 22:54:06 crc kubenswrapper[4755]: E0202 22:54:06.974895 4755 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current"
Feb 02 22:54:06 crc kubenswrapper[4755]: E0202 22:54:06.974989 4755 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current"
Feb 02 22:54:06 crc kubenswrapper[4755]: E0202 22:54:06.975241 4755 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cloudkitty-db-sync,Image:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CloudKittyPassword,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:osp-secret,},Key:CloudKittyPassword,Optional:nil,},},},EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:cloudkitty-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:certs,ReadOnly:true,MountPath:/var/lib/openstack/loki-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-htwfg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42406,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-db-sync-75zwb_openstack(bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Feb 02 22:54:06 crc kubenswrapper[4755]: E0202 22:54:06.976533 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cloudkitty-db-sync-75zwb" podUID="bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe"
Feb 02 22:54:07 crc kubenswrapper[4755]: I0202 22:54:07.091883 4755 scope.go:117] "RemoveContainer" containerID="919f111e4046619309b3de180d30ff225ab8d08eb07810b870554a01e7753c0e"
Feb 02 22:54:07 crc kubenswrapper[4755]: I0202 22:54:07.270970 4755 scope.go:117] "RemoveContainer" containerID="8a6be0e50f6792220f047fa24b32e5af2433a25e2da576096be7de439bc8dc2f"
Feb 02 22:54:07 crc kubenswrapper[4755]: I0202 22:54:07.507358 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-jt9ln"]
Feb 02 22:54:07 crc kubenswrapper[4755]: I0202 22:54:07.574210 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-85c9cbd9b8-2tlmh"]
Feb 02 22:54:07 crc kubenswrapper[4755]: I0202 22:54:07.768380 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sk4gl" event={"ID":"27d7e0ea-4abd-4afe-be9b-460fbfea81c7","Type":"ContainerStarted","Data":"9115f92997d485b695ce9cd0af538bbda2028bf538d2e9d879f856d82c88c465"}
Feb 02 22:54:07 crc kubenswrapper[4755]: I0202 22:54:07.790176 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" event={"ID":"73a43f85-781b-4f83-b18d-0d69e6d272e0","Type":"ContainerStarted","Data":"59fea455a5a2c9344c52db863425754f83889948c44b949ad579dcedb3886d3a"}
Feb 02 22:54:07 crc kubenswrapper[4755]: I0202 22:54:07.792439 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-sk4gl" podStartSLOduration=14.362360261 podStartE2EDuration="38.792413102s" podCreationTimestamp="2026-02-02 22:53:29 +0000 UTC" firstStartedPulling="2026-02-02 22:53:31.50499576 +0000 UTC m=+1167.196216096" lastFinishedPulling="2026-02-02 22:53:55.935048611 +0000 UTC m=+1191.626268937" observedRunningTime="2026-02-02 22:54:07.784955354 +0000 UTC m=+1203.476175690" watchObservedRunningTime="2026-02-02 22:54:07.792413102 +0000 UTC m=+1203.483633458"
Feb 02 22:54:07 crc kubenswrapper[4755]: I0202 22:54:07.794457 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8","Type":"ContainerStarted","Data":"52bac8d44a3fc4485ca16814830e3f2bdab966bbab186b22e4a119fc492f0fec"}
Feb 02 22:54:07 crc kubenswrapper[4755]: I0202 22:54:07.800256 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-55dvt" event={"ID":"4f55b2a0-624c-46b1-bede-8cb15264838e","Type":"ContainerStarted","Data":"93fab4df7bcf4224d2cfaa86fd176eddac2020cd566c469af102de4402e8778f"}
Feb 02 22:54:07 crc kubenswrapper[4755]: I0202 22:54:07.808592 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerStarted","Data":"e1a0edb6bc3318168553c3186dbd5ca8239787806078b7f1d8e7cf50cd938918"}
Feb 02 22:54:07 crc kubenswrapper[4755]: I0202 22:54:07.821992 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-85c9cbd9b8-2tlmh" event={"ID":"107d5dc0-af83-4833-bf2f-cd99d0e1f15d","Type":"ContainerStarted","Data":"69e8651d5f6e819cbabc2b4746acc52c19b5c08412179f776a7a30f3355d24f8"}
Feb 02 22:54:07 crc kubenswrapper[4755]: E0202 22:54:07.822966 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current\\\"\"" pod="openstack/cloudkitty-db-sync-75zwb" podUID="bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe"
Feb 02 22:54:07 crc kubenswrapper[4755]: I0202 22:54:07.832039 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-96d6dcbbf-js6bv"]
Feb 02 22:54:07 crc kubenswrapper[4755]: I0202 22:54:07.832348 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-55dvt" podStartSLOduration=26.83233019 podStartE2EDuration="26.83233019s" podCreationTimestamp="2026-02-02 22:53:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:07.818409524 +0000 UTC m=+1203.509629850" watchObservedRunningTime="2026-02-02 22:54:07.83233019 +0000 UTC m=+1203.523550516"
Feb 02 22:54:08 crc kubenswrapper[4755]: I0202 22:54:08.835034 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c3d93484-a8c3-4a2f-b2e0-22c007d77b11","Type":"ContainerStarted","Data":"3d2e63fe42d74e39bd498610e25f229cef451c31303bb51964e5f0e4e3c2e3ef"}
Feb 02 22:54:08 crc kubenswrapper[4755]: I0202 22:54:08.835488 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c3d93484-a8c3-4a2f-b2e0-22c007d77b11","Type":"ContainerStarted","Data":"a29ed88fead9f09e4927fcf90ff709e8671dce83d9b039eff7471519aa57bcb8"}
Feb 02 22:54:08 crc kubenswrapper[4755]: I0202 22:54:08.835193 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="c3d93484-a8c3-4a2f-b2e0-22c007d77b11" containerName="glance-log" containerID="cri-o://a29ed88fead9f09e4927fcf90ff709e8671dce83d9b039eff7471519aa57bcb8" gracePeriod=30
Feb 02 22:54:08 crc kubenswrapper[4755]: I0202 22:54:08.835616 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="c3d93484-a8c3-4a2f-b2e0-22c007d77b11" containerName="glance-httpd" containerID="cri-o://3d2e63fe42d74e39bd498610e25f229cef451c31303bb51964e5f0e4e3c2e3ef" gracePeriod=30
Feb 02 22:54:08 crc kubenswrapper[4755]: I0202 22:54:08.837796 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-96d6dcbbf-js6bv" event={"ID":"dd8f8c90-543e-4125-9a9b-8c33e75c75ca","Type":"ContainerStarted","Data":"cd8dba955bdd475e250cf9ac3b118aec4b6716623e5bb9820f4b0d1ce85596fc"}
Feb 02 22:54:08 crc kubenswrapper[4755]: I0202 22:54:08.837838 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-96d6dcbbf-js6bv" event={"ID":"dd8f8c90-543e-4125-9a9b-8c33e75c75ca","Type":"ContainerStarted","Data":"b78c8e6a497ccc37d0e11270409e7a0aab7e4315d3b7ea3284b87340b6a379c5"}
Feb 02 22:54:08 crc kubenswrapper[4755]: I0202 22:54:08.837848 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-96d6dcbbf-js6bv" event={"ID":"dd8f8c90-543e-4125-9a9b-8c33e75c75ca","Type":"ContainerStarted","Data":"c7a78759ba5bee3ec6248578d1f143b92bf1192ae87a9d85e5e0416d10432241"}
Feb 02 22:54:08 crc kubenswrapper[4755]: I0202 22:54:08.838685 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-96d6dcbbf-js6bv"
Feb 02 22:54:08 crc kubenswrapper[4755]: I0202 22:54:08.841194 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-85c9cbd9b8-2tlmh" event={"ID":"107d5dc0-af83-4833-bf2f-cd99d0e1f15d","Type":"ContainerStarted","Data":"cf39e69b8535440711741f79a967337f1e9e8694f99048739d48de23d6d54f68"}
Feb 02 22:54:08 crc kubenswrapper[4755]: I0202 22:54:08.841456 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-85c9cbd9b8-2tlmh" event={"ID":"107d5dc0-af83-4833-bf2f-cd99d0e1f15d","Type":"ContainerStarted","Data":"6694fef6169e0b72eb1f3de2061069f4acd39aad940d7d53961c3036cd18ddd5"}
Feb 02 22:54:08 crc kubenswrapper[4755]: I0202 22:54:08.842061 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-85c9cbd9b8-2tlmh"
Feb 02 22:54:08 crc kubenswrapper[4755]: I0202 22:54:08.844013 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2","Type":"ContainerStarted","Data":"b823d881239168bd04301f5922e74ac6d666f2e70f57c27cee8cdb6d10871dd7"}
Feb 02 22:54:08 crc kubenswrapper[4755]: I0202 22:54:08.844038 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2","Type":"ContainerStarted","Data":"e66bb3a93cb64a46d6b4f91e0907f2a8381eefa4d5d409f0c61bfb9da0de6c1e"}
Feb 02 22:54:08 crc kubenswrapper[4755]: I0202 22:54:08.844121 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2" containerName="glance-log" containerID="cri-o://e66bb3a93cb64a46d6b4f91e0907f2a8381eefa4d5d409f0c61bfb9da0de6c1e" gracePeriod=30
Feb 02 22:54:08 crc kubenswrapper[4755]: I0202 22:54:08.844310 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2" containerName="glance-httpd" containerID="cri-o://b823d881239168bd04301f5922e74ac6d666f2e70f57c27cee8cdb6d10871dd7" gracePeriod=30
Feb 02 22:54:08 crc kubenswrapper[4755]: I0202 22:54:08.860397 4755 generic.go:334] "Generic (PLEG): container finished" podID="73a43f85-781b-4f83-b18d-0d69e6d272e0" containerID="f5e0897b9b71dd6d64819c54eca3881e2bc5b12447ee249b80f91e1a20f5042c" exitCode=0
Feb 02 22:54:08 crc kubenswrapper[4755]: I0202 22:54:08.863016 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" event={"ID":"73a43f85-781b-4f83-b18d-0d69e6d272e0","Type":"ContainerDied","Data":"f5e0897b9b71dd6d64819c54eca3881e2bc5b12447ee249b80f91e1a20f5042c"}
Feb 02 22:54:08 crc kubenswrapper[4755]: I0202 22:54:08.865071 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=32.865050259 podStartE2EDuration="32.865050259s" podCreationTimestamp="2026-02-02 22:53:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:08.856229084 +0000 UTC m=+1204.547449400" watchObservedRunningTime="2026-02-02 22:54:08.865050259 +0000 UTC m=+1204.556270585"
Feb 02 22:54:08 crc kubenswrapper[4755]: I0202 22:54:08.881061 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=32.881039423 podStartE2EDuration="32.881039423s" podCreationTimestamp="2026-02-02 22:53:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:08.876283631 +0000 UTC m=+1204.567503967" watchObservedRunningTime="2026-02-02 22:54:08.881039423 +0000 UTC m=+1204.572259749"
Feb 02 22:54:08 crc kubenswrapper[4755]: I0202 22:54:08.913334 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-96d6dcbbf-js6bv" podStartSLOduration=9.91331961 podStartE2EDuration="9.91331961s" podCreationTimestamp="2026-02-02 22:53:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:08.90971626 +0000 UTC m=+1204.600936586" watchObservedRunningTime="2026-02-02 22:54:08.91331961 +0000 UTC m=+1204.604539936"
Feb 02 22:54:08 crc kubenswrapper[4755]: I0202 22:54:08.939106 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-85c9cbd9b8-2tlmh" podStartSLOduration=11.939078626 podStartE2EDuration="11.939078626s" podCreationTimestamp="2026-02-02 22:53:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:08.92915048 +0000 UTC m=+1204.620370816" watchObservedRunningTime="2026-02-02 22:54:08.939078626 +0000 UTC m=+1204.630298962"
Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.842627 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.875347 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" event={"ID":"73a43f85-781b-4f83-b18d-0d69e6d272e0","Type":"ContainerStarted","Data":"774ccc6d24c7409568cc73b5565ada9f92518ff2b8eb0bc4d4fb44735ddafc05"}
Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.876478 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6b7b667979-jt9ln"
Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.880510 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8","Type":"ContainerStarted","Data":"1dc6513b3c0184af868a6b39a049b284d48393cdab9d92a1ff4976d21747c80a"}
Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.882822 4755 generic.go:334] "Generic (PLEG): container finished" podID="c3d93484-a8c3-4a2f-b2e0-22c007d77b11" containerID="3d2e63fe42d74e39bd498610e25f229cef451c31303bb51964e5f0e4e3c2e3ef" exitCode=0
Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.882843 4755 generic.go:334] "Generic (PLEG): container finished" podID="c3d93484-a8c3-4a2f-b2e0-22c007d77b11" containerID="a29ed88fead9f09e4927fcf90ff709e8671dce83d9b039eff7471519aa57bcb8" exitCode=143
Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.882872 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c3d93484-a8c3-4a2f-b2e0-22c007d77b11","Type":"ContainerDied","Data":"3d2e63fe42d74e39bd498610e25f229cef451c31303bb51964e5f0e4e3c2e3ef"}
Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.882888 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c3d93484-a8c3-4a2f-b2e0-22c007d77b11","Type":"ContainerDied","Data":"a29ed88fead9f09e4927fcf90ff709e8671dce83d9b039eff7471519aa57bcb8"}
Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.884447 4755 generic.go:334] "Generic (PLEG): container finished" podID="a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2" containerID="b823d881239168bd04301f5922e74ac6d666f2e70f57c27cee8cdb6d10871dd7" exitCode=143
Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.884473 4755 generic.go:334] "Generic (PLEG): container finished" podID="a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2" containerID="e66bb3a93cb64a46d6b4f91e0907f2a8381eefa4d5d409f0c61bfb9da0de6c1e" exitCode=143
Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.885078 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.885659 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2","Type":"ContainerDied","Data":"b823d881239168bd04301f5922e74ac6d666f2e70f57c27cee8cdb6d10871dd7"} Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.885915 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2","Type":"ContainerDied","Data":"e66bb3a93cb64a46d6b4f91e0907f2a8381eefa4d5d409f0c61bfb9da0de6c1e"} Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.885933 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2","Type":"ContainerDied","Data":"154d4c16a0cd44e9bab6736091068087bc68df248600090318d97853f809adb4"} Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.885949 4755 scope.go:117] "RemoveContainer" containerID="b823d881239168bd04301f5922e74ac6d666f2e70f57c27cee8cdb6d10871dd7" Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.900182 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" podStartSLOduration=12.900164925 podStartE2EDuration="12.900164925s" podCreationTimestamp="2026-02-02 22:53:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:09.896907605 +0000 UTC m=+1205.588127931" watchObservedRunningTime="2026-02-02 22:54:09.900164925 +0000 UTC m=+1205.591385251" Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.922189 4755 scope.go:117] "RemoveContainer" containerID="e66bb3a93cb64a46d6b4f91e0907f2a8381eefa4d5d409f0c61bfb9da0de6c1e" Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.939405 4755 scope.go:117] "RemoveContainer" containerID="b823d881239168bd04301f5922e74ac6d666f2e70f57c27cee8cdb6d10871dd7" Feb 02 22:54:09 crc kubenswrapper[4755]: E0202 22:54:09.940125 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b823d881239168bd04301f5922e74ac6d666f2e70f57c27cee8cdb6d10871dd7\": container with ID starting with b823d881239168bd04301f5922e74ac6d666f2e70f57c27cee8cdb6d10871dd7 not found: ID does not exist" containerID="b823d881239168bd04301f5922e74ac6d666f2e70f57c27cee8cdb6d10871dd7" Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.940156 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b823d881239168bd04301f5922e74ac6d666f2e70f57c27cee8cdb6d10871dd7"} err="failed to get container status \"b823d881239168bd04301f5922e74ac6d666f2e70f57c27cee8cdb6d10871dd7\": rpc error: code = NotFound desc = could not find container \"b823d881239168bd04301f5922e74ac6d666f2e70f57c27cee8cdb6d10871dd7\": container with ID starting with b823d881239168bd04301f5922e74ac6d666f2e70f57c27cee8cdb6d10871dd7 not found: ID does not exist" Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.940194 4755 scope.go:117] "RemoveContainer" containerID="e66bb3a93cb64a46d6b4f91e0907f2a8381eefa4d5d409f0c61bfb9da0de6c1e" Feb 02 22:54:09 crc kubenswrapper[4755]: E0202 22:54:09.940510 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"e66bb3a93cb64a46d6b4f91e0907f2a8381eefa4d5d409f0c61bfb9da0de6c1e\": container with ID starting with e66bb3a93cb64a46d6b4f91e0907f2a8381eefa4d5d409f0c61bfb9da0de6c1e not found: ID does not exist" containerID="e66bb3a93cb64a46d6b4f91e0907f2a8381eefa4d5d409f0c61bfb9da0de6c1e" Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.940534 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e66bb3a93cb64a46d6b4f91e0907f2a8381eefa4d5d409f0c61bfb9da0de6c1e"} err="failed to get container status \"e66bb3a93cb64a46d6b4f91e0907f2a8381eefa4d5d409f0c61bfb9da0de6c1e\": rpc error: code = NotFound desc = could not find container \"e66bb3a93cb64a46d6b4f91e0907f2a8381eefa4d5d409f0c61bfb9da0de6c1e\": container with ID starting with e66bb3a93cb64a46d6b4f91e0907f2a8381eefa4d5d409f0c61bfb9da0de6c1e not found: ID does not exist" Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.940548 4755 scope.go:117] "RemoveContainer" containerID="b823d881239168bd04301f5922e74ac6d666f2e70f57c27cee8cdb6d10871dd7" Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.940804 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b823d881239168bd04301f5922e74ac6d666f2e70f57c27cee8cdb6d10871dd7"} err="failed to get container status \"b823d881239168bd04301f5922e74ac6d666f2e70f57c27cee8cdb6d10871dd7\": rpc error: code = NotFound desc = could not find container \"b823d881239168bd04301f5922e74ac6d666f2e70f57c27cee8cdb6d10871dd7\": container with ID starting with b823d881239168bd04301f5922e74ac6d666f2e70f57c27cee8cdb6d10871dd7 not found: ID does not exist" Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.940828 4755 scope.go:117] "RemoveContainer" containerID="e66bb3a93cb64a46d6b4f91e0907f2a8381eefa4d5d409f0c61bfb9da0de6c1e" Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.941100 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e66bb3a93cb64a46d6b4f91e0907f2a8381eefa4d5d409f0c61bfb9da0de6c1e"} err="failed to get container status \"e66bb3a93cb64a46d6b4f91e0907f2a8381eefa4d5d409f0c61bfb9da0de6c1e\": rpc error: code = NotFound desc = could not find container \"e66bb3a93cb64a46d6b4f91e0907f2a8381eefa4d5d409f0c61bfb9da0de6c1e\": container with ID starting with e66bb3a93cb64a46d6b4f91e0907f2a8381eefa4d5d409f0c61bfb9da0de6c1e not found: ID does not exist" Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.968884 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.993155 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-scripts\") pod \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.993317 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-config-data\") pod \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.998052 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bjxkr\" (UniqueName: \"kubernetes.io/projected/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-kube-api-access-bjxkr\") pod \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.998415 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-combined-ca-bundle\") pod \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " Feb 02 22:54:09 crc kubenswrapper[4755]: I0202 22:54:09.998540 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") pod \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.001030 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-logs\") pod \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.001067 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-httpd-run\") pod \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\" (UID: \"a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2\") " Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:09.999041 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-scripts" (OuterVolumeSpecName: "scripts") pod "a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2" (UID: "a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.001250 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-logs" (OuterVolumeSpecName: "logs") pod "a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2" (UID: "a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.001489 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2" (UID: "a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.002135 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.002153 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-logs\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.002163 4755 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.004766 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-kube-api-access-bjxkr" (OuterVolumeSpecName: "kube-api-access-bjxkr") pod "a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2" (UID: "a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2"). InnerVolumeSpecName "kube-api-access-bjxkr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.038050 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484" (OuterVolumeSpecName: "glance") pod "a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2" (UID: "a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2"). InnerVolumeSpecName "pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484". PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.051559 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2" (UID: "a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.065705 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-config-data" (OuterVolumeSpecName: "config-data") pod "a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2" (UID: "a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.103752 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-config-data\") pod \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.103824 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-scripts\") pod \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.103984 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") pod \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.104010 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-trlf2\" (UniqueName: \"kubernetes.io/projected/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-kube-api-access-trlf2\") pod \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.104033 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-combined-ca-bundle\") pod \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.104129 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-logs\") pod \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.104215 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-httpd-run\") pod \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\" (UID: \"c3d93484-a8c3-4a2f-b2e0-22c007d77b11\") " Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.104611 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.104628 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bjxkr\" (UniqueName: \"kubernetes.io/projected/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-kube-api-access-bjxkr\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.104639 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.104657 4755 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") on node \"crc\" " Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.105451 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-logs" (OuterVolumeSpecName: "logs") pod "c3d93484-a8c3-4a2f-b2e0-22c007d77b11" (UID: "c3d93484-a8c3-4a2f-b2e0-22c007d77b11"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.105775 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c3d93484-a8c3-4a2f-b2e0-22c007d77b11" (UID: "c3d93484-a8c3-4a2f-b2e0-22c007d77b11"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.108362 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-scripts" (OuterVolumeSpecName: "scripts") pod "c3d93484-a8c3-4a2f-b2e0-22c007d77b11" (UID: "c3d93484-a8c3-4a2f-b2e0-22c007d77b11"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.110263 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-kube-api-access-trlf2" (OuterVolumeSpecName: "kube-api-access-trlf2") pod "c3d93484-a8c3-4a2f-b2e0-22c007d77b11" (UID: "c3d93484-a8c3-4a2f-b2e0-22c007d77b11"). InnerVolumeSpecName "kube-api-access-trlf2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.121656 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854" (OuterVolumeSpecName: "glance") pod "c3d93484-a8c3-4a2f-b2e0-22c007d77b11" (UID: "c3d93484-a8c3-4a2f-b2e0-22c007d77b11"). InnerVolumeSpecName "pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854". PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.138933 4755 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.139350 4755 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484") on node "crc" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.149836 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c3d93484-a8c3-4a2f-b2e0-22c007d77b11" (UID: "c3d93484-a8c3-4a2f-b2e0-22c007d77b11"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.165769 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-config-data" (OuterVolumeSpecName: "config-data") pod "c3d93484-a8c3-4a2f-b2e0-22c007d77b11" (UID: "c3d93484-a8c3-4a2f-b2e0-22c007d77b11"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.206597 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-logs\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.206634 4755 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.206651 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.206662 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.206700 4755 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") on node \"crc\" " Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.206715 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-trlf2\" (UniqueName: \"kubernetes.io/projected/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-kube-api-access-trlf2\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.206754 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3d93484-a8c3-4a2f-b2e0-22c007d77b11-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.206772 4755 reconciler_common.go:293] "Volume detached for volume \"pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.245882 4755 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.246042 4755 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854") on node "crc"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.308136 4755 reconciler_common.go:293] "Volume detached for volume \"pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.319768 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.329621 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.346905 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 02 22:54:10 crc kubenswrapper[4755]: E0202 22:54:10.347255 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3d93484-a8c3-4a2f-b2e0-22c007d77b11" containerName="glance-log"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.347270 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3d93484-a8c3-4a2f-b2e0-22c007d77b11" containerName="glance-log"
Feb 02 22:54:10 crc kubenswrapper[4755]: E0202 22:54:10.347284 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3d93484-a8c3-4a2f-b2e0-22c007d77b11" containerName="glance-httpd"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.347291 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3d93484-a8c3-4a2f-b2e0-22c007d77b11" containerName="glance-httpd"
Feb 02 22:54:10 crc kubenswrapper[4755]: E0202 22:54:10.347312 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2" containerName="glance-httpd"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.347317 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2" containerName="glance-httpd"
Feb 02 22:54:10 crc kubenswrapper[4755]: E0202 22:54:10.347333 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2" containerName="glance-log"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.347339 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2" containerName="glance-log"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.347545 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2" containerName="glance-httpd"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.347572 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3d93484-a8c3-4a2f-b2e0-22c007d77b11" containerName="glance-log"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.347584 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3d93484-a8c3-4a2f-b2e0-22c007d77b11" containerName="glance-httpd"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.347593 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2" containerName="glance-log"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.348743 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.356202 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.356461 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.391808 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.511111 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.511173 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-config-data\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.511255 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-scripts\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.511291 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjdlh\" (UniqueName: \"kubernetes.io/projected/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-kube-api-access-vjdlh\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.511378 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.511497 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.511532 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-logs\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.511565 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.613524 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.613567 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-logs\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.613595 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.613635 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.613654 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-config-data\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.613687 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-scripts\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.613710 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjdlh\" (UniqueName: \"kubernetes.io/projected/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-kube-api-access-vjdlh\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.613769 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0"
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.614201
4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.614647 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-logs\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.618643 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.618920 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-config-data\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.619766 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-scripts\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.620353 4755 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.620515 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/36a0f8b995f8d37a8776a91ffef55e84b7ae73b259c9d13bbc3129ab0c1d828a/globalmount\"" pod="openstack/glance-default-external-api-0" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.621006 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.655316 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjdlh\" (UniqueName: \"kubernetes.io/projected/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-kube-api-access-vjdlh\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.678544 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") pod \"glance-default-external-api-0\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " pod="openstack/glance-default-external-api-0" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.685958 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.895950 4755 generic.go:334] "Generic (PLEG): container finished" podID="27d7e0ea-4abd-4afe-be9b-460fbfea81c7" containerID="9115f92997d485b695ce9cd0af538bbda2028bf538d2e9d879f856d82c88c465" exitCode=0 Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.896001 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sk4gl" event={"ID":"27d7e0ea-4abd-4afe-be9b-460fbfea81c7","Type":"ContainerDied","Data":"9115f92997d485b695ce9cd0af538bbda2028bf538d2e9d879f856d82c88c465"} Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.899860 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.899876 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c3d93484-a8c3-4a2f-b2e0-22c007d77b11","Type":"ContainerDied","Data":"003d113fd6d22a83ac0c685ef19630937f85c1385f65b4169e6f83d37cf442dc"} Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.899933 4755 scope.go:117] "RemoveContainer" containerID="3d2e63fe42d74e39bd498610e25f229cef451c31303bb51964e5f0e4e3c2e3ef" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.903447 4755 generic.go:334] "Generic (PLEG): container finished" podID="4f55b2a0-624c-46b1-bede-8cb15264838e" containerID="93fab4df7bcf4224d2cfaa86fd176eddac2020cd566c469af102de4402e8778f" exitCode=0 Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.904023 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-55dvt" event={"ID":"4f55b2a0-624c-46b1-bede-8cb15264838e","Type":"ContainerDied","Data":"93fab4df7bcf4224d2cfaa86fd176eddac2020cd566c469af102de4402e8778f"} Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.941270 4755 scope.go:117] "RemoveContainer" containerID="a29ed88fead9f09e4927fcf90ff709e8671dce83d9b039eff7471519aa57bcb8" Feb 02 22:54:10 crc kubenswrapper[4755]: I0202 22:54:10.982056 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.010611 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.017848 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.019268 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.023343 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.033616 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.033997 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.096374 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2" path="/var/lib/kubelet/pods/a44c73c0-10f9-4a4a-9b1c-c1df045ed5c2/volumes" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.098480 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3d93484-a8c3-4a2f-b2e0-22c007d77b11" path="/var/lib/kubelet/pods/c3d93484-a8c3-4a2f-b2e0-22c007d77b11/volumes" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.125752 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5ee3fd38-71d5-429f-87d5-1c3556ddff55-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.125803 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.125858 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.125879 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fn97b\" (UniqueName: \"kubernetes.io/projected/5ee3fd38-71d5-429f-87d5-1c3556ddff55-kube-api-access-fn97b\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.125927 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.125947 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ee3fd38-71d5-429f-87d5-1c3556ddff55-logs\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " 
pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.125979 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.125996 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.227899 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5ee3fd38-71d5-429f-87d5-1c3556ddff55-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.227956 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.228017 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.228045 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fn97b\" (UniqueName: \"kubernetes.io/projected/5ee3fd38-71d5-429f-87d5-1c3556ddff55-kube-api-access-fn97b\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.228109 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.228138 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ee3fd38-71d5-429f-87d5-1c3556ddff55-logs\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.228177 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-config-data\") pod \"glance-default-internal-api-0\" (UID: 
\"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.228199 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.228323 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5ee3fd38-71d5-429f-87d5-1c3556ddff55-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.228861 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ee3fd38-71d5-429f-87d5-1c3556ddff55-logs\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.237938 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.238974 4755 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.239012 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/12ae2e05c1109de2cfa2799707fdeed95eed1c35304f00e5efa94d1e550db555/globalmount\"" pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.240455 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.241559 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.252326 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.285260 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fn97b\" (UniqueName: \"kubernetes.io/projected/5ee3fd38-71d5-429f-87d5-1c3556ddff55-kube-api-access-fn97b\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.288582 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") pod \"glance-default-internal-api-0\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.300236 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 22:54:11 crc kubenswrapper[4755]: W0202 22:54:11.306029 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae1efe99_ce8f_4ef7_b641_dd666e3864ea.slice/crio-a5c100fc2653f744f1c1ba4222dd3ba008493963cd51157e4b296b411a6f5ca7 WatchSource:0}: Error finding container a5c100fc2653f744f1c1ba4222dd3ba008493963cd51157e4b296b411a6f5ca7: Status 404 returned error can't find the container with id a5c100fc2653f744f1c1ba4222dd3ba008493963cd51157e4b296b411a6f5ca7 Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.364113 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.916960 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.926518 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ae1efe99-ce8f-4ef7-b641-dd666e3864ea","Type":"ContainerStarted","Data":"a5c100fc2653f744f1c1ba4222dd3ba008493963cd51157e4b296b411a6f5ca7"} Feb 02 22:54:11 crc kubenswrapper[4755]: W0202 22:54:11.929123 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ee3fd38_71d5_429f_87d5_1c3556ddff55.slice/crio-374a12a2747820eec4d77bba37f5a06f4de437d712abae0f136be9bb4e1983c5 WatchSource:0}: Error finding container 374a12a2747820eec4d77bba37f5a06f4de437d712abae0f136be9bb4e1983c5: Status 404 returned error can't find the container with id 374a12a2747820eec4d77bba37f5a06f4de437d712abae0f136be9bb4e1983c5 Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.929966 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-xwr7s" event={"ID":"e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda","Type":"ContainerStarted","Data":"b96da00dacfa0456a5c7b3629e2126674aac3e023dfa1a6921d3fd3b76bae1db"} Feb 02 22:54:11 crc kubenswrapper[4755]: I0202 22:54:11.951346 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-xwr7s" podStartSLOduration=2.655349745 podStartE2EDuration="42.951328346s" podCreationTimestamp="2026-02-02 22:53:29 +0000 UTC" firstStartedPulling="2026-02-02 22:53:31.187054957 +0000 UTC m=+1166.878275283" lastFinishedPulling="2026-02-02 22:54:11.483033558 +0000 UTC m=+1207.174253884" observedRunningTime="2026-02-02 22:54:11.947112829 +0000 UTC m=+1207.638333155" watchObservedRunningTime="2026-02-02 22:54:11.951328346 +0000 UTC m=+1207.642548672" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.421713 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-sk4gl" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.427233 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-55dvt" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.583218 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-combined-ca-bundle\") pod \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\" (UID: \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\") " Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.583430 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9xwt\" (UniqueName: \"kubernetes.io/projected/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-kube-api-access-l9xwt\") pod \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\" (UID: \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\") " Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.583500 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-scripts\") pod \"4f55b2a0-624c-46b1-bede-8cb15264838e\" (UID: \"4f55b2a0-624c-46b1-bede-8cb15264838e\") " Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.583521 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-config-data\") pod \"4f55b2a0-624c-46b1-bede-8cb15264838e\" (UID: \"4f55b2a0-624c-46b1-bede-8cb15264838e\") " Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.583549 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-credential-keys\") pod \"4f55b2a0-624c-46b1-bede-8cb15264838e\" (UID: \"4f55b2a0-624c-46b1-bede-8cb15264838e\") " Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.583590 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-config-data\") pod \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\" (UID: \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\") " Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.583637 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-logs\") pod \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\" (UID: \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\") " Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.583670 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-scripts\") pod \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\" (UID: \"27d7e0ea-4abd-4afe-be9b-460fbfea81c7\") " Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.583719 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-fernet-keys\") pod \"4f55b2a0-624c-46b1-bede-8cb15264838e\" (UID: \"4f55b2a0-624c-46b1-bede-8cb15264838e\") " Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.583820 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-combined-ca-bundle\") pod \"4f55b2a0-624c-46b1-bede-8cb15264838e\" (UID: \"4f55b2a0-624c-46b1-bede-8cb15264838e\") " Feb 02 
22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.583888 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vl7tp\" (UniqueName: \"kubernetes.io/projected/4f55b2a0-624c-46b1-bede-8cb15264838e-kube-api-access-vl7tp\") pod \"4f55b2a0-624c-46b1-bede-8cb15264838e\" (UID: \"4f55b2a0-624c-46b1-bede-8cb15264838e\") " Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.587151 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-logs" (OuterVolumeSpecName: "logs") pod "27d7e0ea-4abd-4afe-be9b-460fbfea81c7" (UID: "27d7e0ea-4abd-4afe-be9b-460fbfea81c7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.591579 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f55b2a0-624c-46b1-bede-8cb15264838e-kube-api-access-vl7tp" (OuterVolumeSpecName: "kube-api-access-vl7tp") pod "4f55b2a0-624c-46b1-bede-8cb15264838e" (UID: "4f55b2a0-624c-46b1-bede-8cb15264838e"). InnerVolumeSpecName "kube-api-access-vl7tp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.592229 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-kube-api-access-l9xwt" (OuterVolumeSpecName: "kube-api-access-l9xwt") pod "27d7e0ea-4abd-4afe-be9b-460fbfea81c7" (UID: "27d7e0ea-4abd-4afe-be9b-460fbfea81c7"). InnerVolumeSpecName "kube-api-access-l9xwt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.592295 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-scripts" (OuterVolumeSpecName: "scripts") pod "27d7e0ea-4abd-4afe-be9b-460fbfea81c7" (UID: "27d7e0ea-4abd-4afe-be9b-460fbfea81c7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.594936 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "4f55b2a0-624c-46b1-bede-8cb15264838e" (UID: "4f55b2a0-624c-46b1-bede-8cb15264838e"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.599944 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "4f55b2a0-624c-46b1-bede-8cb15264838e" (UID: "4f55b2a0-624c-46b1-bede-8cb15264838e"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.606857 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-scripts" (OuterVolumeSpecName: "scripts") pod "4f55b2a0-624c-46b1-bede-8cb15264838e" (UID: "4f55b2a0-624c-46b1-bede-8cb15264838e"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.627516 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "27d7e0ea-4abd-4afe-be9b-460fbfea81c7" (UID: "27d7e0ea-4abd-4afe-be9b-460fbfea81c7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.631057 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4f55b2a0-624c-46b1-bede-8cb15264838e" (UID: "4f55b2a0-624c-46b1-bede-8cb15264838e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.645911 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-config-data" (OuterVolumeSpecName: "config-data") pod "4f55b2a0-624c-46b1-bede-8cb15264838e" (UID: "4f55b2a0-624c-46b1-bede-8cb15264838e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.659438 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-config-data" (OuterVolumeSpecName: "config-data") pod "27d7e0ea-4abd-4afe-be9b-460fbfea81c7" (UID: "27d7e0ea-4abd-4afe-be9b-460fbfea81c7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.687369 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vl7tp\" (UniqueName: \"kubernetes.io/projected/4f55b2a0-624c-46b1-bede-8cb15264838e-kube-api-access-vl7tp\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.687427 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.687442 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9xwt\" (UniqueName: \"kubernetes.io/projected/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-kube-api-access-l9xwt\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.687454 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.687465 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.687476 4755 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-credential-keys\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.687487 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.687497 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-logs\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.687508 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27d7e0ea-4abd-4afe-be9b-460fbfea81c7-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.687517 4755 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:12 crc kubenswrapper[4755]: I0202 22:54:12.687528 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f55b2a0-624c-46b1-bede-8cb15264838e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.029026 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5ee3fd38-71d5-429f-87d5-1c3556ddff55","Type":"ContainerStarted","Data":"f0496aa247df17c0c4a844b37679945755b95ce5a19a813c19f9f84bfb3d4c29"} Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.029370 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5ee3fd38-71d5-429f-87d5-1c3556ddff55","Type":"ContainerStarted","Data":"374a12a2747820eec4d77bba37f5a06f4de437d712abae0f136be9bb4e1983c5"} Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.108303 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-sk4gl" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.126225 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-55dvt" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.155600 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.155582211 podStartE2EDuration="3.155582211s" podCreationTimestamp="2026-02-02 22:54:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:13.120378313 +0000 UTC m=+1208.811598639" watchObservedRunningTime="2026-02-02 22:54:13.155582211 +0000 UTC m=+1208.846802537" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.161604 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ae1efe99-ce8f-4ef7-b641-dd666e3864ea","Type":"ContainerStarted","Data":"cb79db437a03767f0cf2475ba07a9bacb7eaf38cb3d8a087bbd98b846a7a36ef"} Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.161642 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ae1efe99-ce8f-4ef7-b641-dd666e3864ea","Type":"ContainerStarted","Data":"13b4cf9281394bca2abce68d8d4b38d38861e7bb3d2fcf5e62c099e9222a8edb"} Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.161653 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sk4gl" event={"ID":"27d7e0ea-4abd-4afe-be9b-460fbfea81c7","Type":"ContainerDied","Data":"90f2f25850750a63e6507fca629ba805b7598a6320d7b425e7aba84610bd06f2"} Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.161665 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="90f2f25850750a63e6507fca629ba805b7598a6320d7b425e7aba84610bd06f2" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.161674 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-55dvt" event={"ID":"4f55b2a0-624c-46b1-bede-8cb15264838e","Type":"ContainerDied","Data":"d5686bfa084c6c2a2acf0d418ab74db1409f2741c0aaadf4317165875a3bb639"} Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.161683 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d5686bfa084c6c2a2acf0d418ab74db1409f2741c0aaadf4317165875a3bb639" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.186816 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7cb7f594d6-76xlv"] Feb 02 22:54:13 crc kubenswrapper[4755]: E0202 22:54:13.187371 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27d7e0ea-4abd-4afe-be9b-460fbfea81c7" containerName="placement-db-sync" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.187395 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="27d7e0ea-4abd-4afe-be9b-460fbfea81c7" containerName="placement-db-sync" Feb 02 22:54:13 crc kubenswrapper[4755]: E0202 22:54:13.187452 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f55b2a0-624c-46b1-bede-8cb15264838e" containerName="keystone-bootstrap" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.187461 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f55b2a0-624c-46b1-bede-8cb15264838e" containerName="keystone-bootstrap" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.187718 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f55b2a0-624c-46b1-bede-8cb15264838e" containerName="keystone-bootstrap" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.187765 
4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="27d7e0ea-4abd-4afe-be9b-460fbfea81c7" containerName="placement-db-sync" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.188634 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.196225 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-pvh95" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.196409 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.210662 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.214331 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.215505 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.215932 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.220322 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7cb7f594d6-76xlv"] Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.228326 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-5bdd8df796-zxkxp"] Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.229872 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.241084 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.241332 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.241461 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-kgxwf" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.241582 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.241688 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.270476 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5bdd8df796-zxkxp"] Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.341207 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r469t\" (UniqueName: \"kubernetes.io/projected/9ba7348e-74b2-4840-8d02-8bfa3c89c483-kube-api-access-r469t\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.341256 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-combined-ca-bundle\") pod 
\"placement-5bdd8df796-zxkxp\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.341287 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqnqs\" (UniqueName: \"kubernetes.io/projected/58da41fb-8aca-4566-a2c2-a13c57ee04ce-kube-api-access-cqnqs\") pod \"placement-5bdd8df796-zxkxp\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.341308 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ba7348e-74b2-4840-8d02-8bfa3c89c483-scripts\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.341324 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-internal-tls-certs\") pod \"placement-5bdd8df796-zxkxp\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.341362 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ba7348e-74b2-4840-8d02-8bfa3c89c483-internal-tls-certs\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.341386 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-scripts\") pod \"placement-5bdd8df796-zxkxp\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.341401 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9ba7348e-74b2-4840-8d02-8bfa3c89c483-credential-keys\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.341421 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ba7348e-74b2-4840-8d02-8bfa3c89c483-combined-ca-bundle\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.341478 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-config-data\") pod \"placement-5bdd8df796-zxkxp\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.358341 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: 
\"kubernetes.io/secret/9ba7348e-74b2-4840-8d02-8bfa3c89c483-fernet-keys\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.358413 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-public-tls-certs\") pod \"placement-5bdd8df796-zxkxp\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.358457 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ba7348e-74b2-4840-8d02-8bfa3c89c483-public-tls-certs\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.358491 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ba7348e-74b2-4840-8d02-8bfa3c89c483-config-data\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.358570 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58da41fb-8aca-4566-a2c2-a13c57ee04ce-logs\") pod \"placement-5bdd8df796-zxkxp\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.460270 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9ba7348e-74b2-4840-8d02-8bfa3c89c483-fernet-keys\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.460325 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-public-tls-certs\") pod \"placement-5bdd8df796-zxkxp\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.460356 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ba7348e-74b2-4840-8d02-8bfa3c89c483-public-tls-certs\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.460384 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ba7348e-74b2-4840-8d02-8bfa3c89c483-config-data\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.460438 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58da41fb-8aca-4566-a2c2-a13c57ee04ce-logs\") pod 
\"placement-5bdd8df796-zxkxp\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.460478 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r469t\" (UniqueName: \"kubernetes.io/projected/9ba7348e-74b2-4840-8d02-8bfa3c89c483-kube-api-access-r469t\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.461096 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-combined-ca-bundle\") pod \"placement-5bdd8df796-zxkxp\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.461151 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58da41fb-8aca-4566-a2c2-a13c57ee04ce-logs\") pod \"placement-5bdd8df796-zxkxp\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.461207 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqnqs\" (UniqueName: \"kubernetes.io/projected/58da41fb-8aca-4566-a2c2-a13c57ee04ce-kube-api-access-cqnqs\") pod \"placement-5bdd8df796-zxkxp\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.461287 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ba7348e-74b2-4840-8d02-8bfa3c89c483-scripts\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.461321 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-internal-tls-certs\") pod \"placement-5bdd8df796-zxkxp\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.461855 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ba7348e-74b2-4840-8d02-8bfa3c89c483-internal-tls-certs\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.461905 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-scripts\") pod \"placement-5bdd8df796-zxkxp\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.461931 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9ba7348e-74b2-4840-8d02-8bfa3c89c483-credential-keys\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " 
pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.461964 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ba7348e-74b2-4840-8d02-8bfa3c89c483-combined-ca-bundle\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.462953 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-config-data\") pod \"placement-5bdd8df796-zxkxp\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.465445 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9ba7348e-74b2-4840-8d02-8bfa3c89c483-fernet-keys\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.465445 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ba7348e-74b2-4840-8d02-8bfa3c89c483-public-tls-certs\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.468031 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-combined-ca-bundle\") pod \"placement-5bdd8df796-zxkxp\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.483358 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-public-tls-certs\") pod \"placement-5bdd8df796-zxkxp\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.483523 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-config-data\") pod \"placement-5bdd8df796-zxkxp\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.483678 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ba7348e-74b2-4840-8d02-8bfa3c89c483-combined-ca-bundle\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.483676 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-scripts\") pod \"placement-5bdd8df796-zxkxp\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.486281 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9ba7348e-74b2-4840-8d02-8bfa3c89c483-internal-tls-certs\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.488285 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r469t\" (UniqueName: \"kubernetes.io/projected/9ba7348e-74b2-4840-8d02-8bfa3c89c483-kube-api-access-r469t\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.489453 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-internal-tls-certs\") pod \"placement-5bdd8df796-zxkxp\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.489505 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ba7348e-74b2-4840-8d02-8bfa3c89c483-config-data\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.489886 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9ba7348e-74b2-4840-8d02-8bfa3c89c483-credential-keys\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.491476 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqnqs\" (UniqueName: \"kubernetes.io/projected/58da41fb-8aca-4566-a2c2-a13c57ee04ce-kube-api-access-cqnqs\") pod \"placement-5bdd8df796-zxkxp\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.494311 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ba7348e-74b2-4840-8d02-8bfa3c89c483-scripts\") pod \"keystone-7cb7f594d6-76xlv\" (UID: \"9ba7348e-74b2-4840-8d02-8bfa3c89c483\") " pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.635986 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-f565458cd-6bkv6"] Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.643957 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.656659 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-f565458cd-6bkv6"] Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.685055 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.685907 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.769919 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/541bac40-f0e1-4d39-9595-447b0f5b0c26-public-tls-certs\") pod \"placement-f565458cd-6bkv6\" (UID: \"541bac40-f0e1-4d39-9595-447b0f5b0c26\") " pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.770045 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2q85\" (UniqueName: \"kubernetes.io/projected/541bac40-f0e1-4d39-9595-447b0f5b0c26-kube-api-access-d2q85\") pod \"placement-f565458cd-6bkv6\" (UID: \"541bac40-f0e1-4d39-9595-447b0f5b0c26\") " pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.770169 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/541bac40-f0e1-4d39-9595-447b0f5b0c26-internal-tls-certs\") pod \"placement-f565458cd-6bkv6\" (UID: \"541bac40-f0e1-4d39-9595-447b0f5b0c26\") " pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.770244 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/541bac40-f0e1-4d39-9595-447b0f5b0c26-scripts\") pod \"placement-f565458cd-6bkv6\" (UID: \"541bac40-f0e1-4d39-9595-447b0f5b0c26\") " pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.770337 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/541bac40-f0e1-4d39-9595-447b0f5b0c26-combined-ca-bundle\") pod \"placement-f565458cd-6bkv6\" (UID: \"541bac40-f0e1-4d39-9595-447b0f5b0c26\") " pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.770421 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/541bac40-f0e1-4d39-9595-447b0f5b0c26-logs\") pod \"placement-f565458cd-6bkv6\" (UID: \"541bac40-f0e1-4d39-9595-447b0f5b0c26\") " pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.770446 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/541bac40-f0e1-4d39-9595-447b0f5b0c26-config-data\") pod \"placement-f565458cd-6bkv6\" (UID: \"541bac40-f0e1-4d39-9595-447b0f5b0c26\") " pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.873853 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/541bac40-f0e1-4d39-9595-447b0f5b0c26-internal-tls-certs\") pod \"placement-f565458cd-6bkv6\" (UID: \"541bac40-f0e1-4d39-9595-447b0f5b0c26\") " pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.873941 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/541bac40-f0e1-4d39-9595-447b0f5b0c26-scripts\") pod \"placement-f565458cd-6bkv6\" (UID: 
\"541bac40-f0e1-4d39-9595-447b0f5b0c26\") " pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.874012 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/541bac40-f0e1-4d39-9595-447b0f5b0c26-combined-ca-bundle\") pod \"placement-f565458cd-6bkv6\" (UID: \"541bac40-f0e1-4d39-9595-447b0f5b0c26\") " pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.874084 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/541bac40-f0e1-4d39-9595-447b0f5b0c26-logs\") pod \"placement-f565458cd-6bkv6\" (UID: \"541bac40-f0e1-4d39-9595-447b0f5b0c26\") " pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.874099 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/541bac40-f0e1-4d39-9595-447b0f5b0c26-config-data\") pod \"placement-f565458cd-6bkv6\" (UID: \"541bac40-f0e1-4d39-9595-447b0f5b0c26\") " pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.874419 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/541bac40-f0e1-4d39-9595-447b0f5b0c26-public-tls-certs\") pod \"placement-f565458cd-6bkv6\" (UID: \"541bac40-f0e1-4d39-9595-447b0f5b0c26\") " pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.874672 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2q85\" (UniqueName: \"kubernetes.io/projected/541bac40-f0e1-4d39-9595-447b0f5b0c26-kube-api-access-d2q85\") pod \"placement-f565458cd-6bkv6\" (UID: \"541bac40-f0e1-4d39-9595-447b0f5b0c26\") " pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.879556 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/541bac40-f0e1-4d39-9595-447b0f5b0c26-logs\") pod \"placement-f565458cd-6bkv6\" (UID: \"541bac40-f0e1-4d39-9595-447b0f5b0c26\") " pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.881590 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/541bac40-f0e1-4d39-9595-447b0f5b0c26-internal-tls-certs\") pod \"placement-f565458cd-6bkv6\" (UID: \"541bac40-f0e1-4d39-9595-447b0f5b0c26\") " pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.884014 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/541bac40-f0e1-4d39-9595-447b0f5b0c26-scripts\") pod \"placement-f565458cd-6bkv6\" (UID: \"541bac40-f0e1-4d39-9595-447b0f5b0c26\") " pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.885056 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/541bac40-f0e1-4d39-9595-447b0f5b0c26-combined-ca-bundle\") pod \"placement-f565458cd-6bkv6\" (UID: \"541bac40-f0e1-4d39-9595-447b0f5b0c26\") " pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.885977 4755 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/541bac40-f0e1-4d39-9595-447b0f5b0c26-public-tls-certs\") pod \"placement-f565458cd-6bkv6\" (UID: \"541bac40-f0e1-4d39-9595-447b0f5b0c26\") " pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.886239 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/541bac40-f0e1-4d39-9595-447b0f5b0c26-config-data\") pod \"placement-f565458cd-6bkv6\" (UID: \"541bac40-f0e1-4d39-9595-447b0f5b0c26\") " pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.896994 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2q85\" (UniqueName: \"kubernetes.io/projected/541bac40-f0e1-4d39-9595-447b0f5b0c26-kube-api-access-d2q85\") pod \"placement-f565458cd-6bkv6\" (UID: \"541bac40-f0e1-4d39-9595-447b0f5b0c26\") " pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:13 crc kubenswrapper[4755]: I0202 22:54:13.965790 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:15 crc kubenswrapper[4755]: I0202 22:54:15.165031 4755 generic.go:334] "Generic (PLEG): container finished" podID="e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda" containerID="b96da00dacfa0456a5c7b3629e2126674aac3e023dfa1a6921d3fd3b76bae1db" exitCode=0 Feb 02 22:54:15 crc kubenswrapper[4755]: I0202 22:54:15.165250 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-xwr7s" event={"ID":"e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda","Type":"ContainerDied","Data":"b96da00dacfa0456a5c7b3629e2126674aac3e023dfa1a6921d3fd3b76bae1db"} Feb 02 22:54:17 crc kubenswrapper[4755]: I0202 22:54:17.187832 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-xwr7s" event={"ID":"e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda","Type":"ContainerDied","Data":"0e47d6d930521f79046aa20a76502453ab0519e7a52d58b894d7b1c59c8d531c"} Feb 02 22:54:17 crc kubenswrapper[4755]: I0202 22:54:17.188834 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e47d6d930521f79046aa20a76502453ab0519e7a52d58b894d7b1c59c8d531c" Feb 02 22:54:17 crc kubenswrapper[4755]: I0202 22:54:17.405929 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-xwr7s" Feb 02 22:54:17 crc kubenswrapper[4755]: I0202 22:54:17.467771 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda-combined-ca-bundle\") pod \"e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda\" (UID: \"e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda\") " Feb 02 22:54:17 crc kubenswrapper[4755]: I0202 22:54:17.496017 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda" (UID: "e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:17 crc kubenswrapper[4755]: I0202 22:54:17.569842 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nkdd7\" (UniqueName: \"kubernetes.io/projected/e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda-kube-api-access-nkdd7\") pod \"e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda\" (UID: \"e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda\") " Feb 02 22:54:17 crc kubenswrapper[4755]: I0202 22:54:17.569971 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda-db-sync-config-data\") pod \"e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda\" (UID: \"e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda\") " Feb 02 22:54:17 crc kubenswrapper[4755]: I0202 22:54:17.570476 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:17 crc kubenswrapper[4755]: I0202 22:54:17.573627 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda" (UID: "e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:17 crc kubenswrapper[4755]: I0202 22:54:17.574878 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda-kube-api-access-nkdd7" (OuterVolumeSpecName: "kube-api-access-nkdd7") pod "e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda" (UID: "e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda"). InnerVolumeSpecName "kube-api-access-nkdd7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:54:17 crc kubenswrapper[4755]: I0202 22:54:17.651883 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5bdd8df796-zxkxp"] Feb 02 22:54:17 crc kubenswrapper[4755]: W0202 22:54:17.668252 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod58da41fb_8aca_4566_a2c2_a13c57ee04ce.slice/crio-507913ec81cca7d331ec162bf1b44e8cfc7d5cb057465b706cebf39f7a4af20b WatchSource:0}: Error finding container 507913ec81cca7d331ec162bf1b44e8cfc7d5cb057465b706cebf39f7a4af20b: Status 404 returned error can't find the container with id 507913ec81cca7d331ec162bf1b44e8cfc7d5cb057465b706cebf39f7a4af20b Feb 02 22:54:17 crc kubenswrapper[4755]: I0202 22:54:17.671193 4755 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:17 crc kubenswrapper[4755]: I0202 22:54:17.671220 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nkdd7\" (UniqueName: \"kubernetes.io/projected/e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda-kube-api-access-nkdd7\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:17 crc kubenswrapper[4755]: I0202 22:54:17.685783 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-f565458cd-6bkv6"] Feb 02 22:54:17 crc kubenswrapper[4755]: W0202 22:54:17.703107 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod541bac40_f0e1_4d39_9595_447b0f5b0c26.slice/crio-0b26fbf7de8412309bf70eb826779be00c31bc6d519278a26fe9cf6412e04276 WatchSource:0}: Error finding container 0b26fbf7de8412309bf70eb826779be00c31bc6d519278a26fe9cf6412e04276: Status 404 returned error can't find the container with id 0b26fbf7de8412309bf70eb826779be00c31bc6d519278a26fe9cf6412e04276 Feb 02 22:54:17 crc kubenswrapper[4755]: I0202 22:54:17.747899 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" Feb 02 22:54:17 crc kubenswrapper[4755]: I0202 22:54:17.813936 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-72bxr"] Feb 02 22:54:17 crc kubenswrapper[4755]: I0202 22:54:17.814362 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" podUID="48a013b4-3dbe-4944-b931-474ec989a214" containerName="dnsmasq-dns" containerID="cri-o://653749eac1c3ef1b80609303926d7bfeb7f12bbb92c79f10be55d5a827c578f2" gracePeriod=10 Feb 02 22:54:17 crc kubenswrapper[4755]: I0202 22:54:17.913008 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7cb7f594d6-76xlv"] Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.350928 4755 generic.go:334] "Generic (PLEG): container finished" podID="48a013b4-3dbe-4944-b931-474ec989a214" containerID="653749eac1c3ef1b80609303926d7bfeb7f12bbb92c79f10be55d5a827c578f2" exitCode=0 Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.351191 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" event={"ID":"48a013b4-3dbe-4944-b931-474ec989a214","Type":"ContainerDied","Data":"653749eac1c3ef1b80609303926d7bfeb7f12bbb92c79f10be55d5a827c578f2"} Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.353374 4755 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5ee3fd38-71d5-429f-87d5-1c3556ddff55","Type":"ContainerStarted","Data":"9e76d4b8d4fd58f8070c673771c4dab54e8d08ed55c687d8b000cd092756dcb6"} Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.356794 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8","Type":"ContainerStarted","Data":"d051e8e995cd5167ecced02fb28ad36c7b2120e447b81b7cdfd7b9d886646cbe"} Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.371022 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-f565458cd-6bkv6" event={"ID":"541bac40-f0e1-4d39-9595-447b0f5b0c26","Type":"ContainerStarted","Data":"f7b7bd797ca6018684f556380af6770b5647f78bbcbd0ba9b29edf50257a0628"} Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.371066 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-f565458cd-6bkv6" event={"ID":"541bac40-f0e1-4d39-9595-447b0f5b0c26","Type":"ContainerStarted","Data":"0b26fbf7de8412309bf70eb826779be00c31bc6d519278a26fe9cf6412e04276"} Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.372136 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5bdd8df796-zxkxp" event={"ID":"58da41fb-8aca-4566-a2c2-a13c57ee04ce","Type":"ContainerStarted","Data":"813c7a8f5d819b2260bf9193d578d9cdc91a59a58a58a36c061eddcaabd384b0"} Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.372159 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5bdd8df796-zxkxp" event={"ID":"58da41fb-8aca-4566-a2c2-a13c57ee04ce","Type":"ContainerStarted","Data":"507913ec81cca7d331ec162bf1b44e8cfc7d5cb057465b706cebf39f7a4af20b"} Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.375678 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-xwr7s" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.378507 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7cb7f594d6-76xlv" event={"ID":"9ba7348e-74b2-4840-8d02-8bfa3c89c483","Type":"ContainerStarted","Data":"9ceebe57b6290fe167d7ff6071e6157ec258d49f959d0f06e944d873a906fd20"} Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.380218 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-7cb7f594d6-76xlv" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.410955 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=8.410861854 podStartE2EDuration="8.410861854s" podCreationTimestamp="2026-02-02 22:54:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:18.390655643 +0000 UTC m=+1214.081875969" watchObservedRunningTime="2026-02-02 22:54:18.410861854 +0000 UTC m=+1214.102082180" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.436681 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-7cb7f594d6-76xlv" podStartSLOduration=5.436663521 podStartE2EDuration="5.436663521s" podCreationTimestamp="2026-02-02 22:54:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:18.411692077 +0000 UTC m=+1214.102912403" watchObservedRunningTime="2026-02-02 22:54:18.436663521 +0000 UTC m=+1214.127883847" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.628769 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.669793 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-7cbd488799-dtxcz"] Feb 02 22:54:18 crc kubenswrapper[4755]: E0202 22:54:18.670272 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48a013b4-3dbe-4944-b931-474ec989a214" containerName="dnsmasq-dns" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.670301 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="48a013b4-3dbe-4944-b931-474ec989a214" containerName="dnsmasq-dns" Feb 02 22:54:18 crc kubenswrapper[4755]: E0202 22:54:18.670325 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda" containerName="barbican-db-sync" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.670333 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda" containerName="barbican-db-sync" Feb 02 22:54:18 crc kubenswrapper[4755]: E0202 22:54:18.670347 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48a013b4-3dbe-4944-b931-474ec989a214" containerName="init" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.670354 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="48a013b4-3dbe-4944-b931-474ec989a214" containerName="init" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.670561 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="48a013b4-3dbe-4944-b931-474ec989a214" containerName="dnsmasq-dns" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.670586 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda" containerName="barbican-db-sync" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.671605 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7cbd488799-dtxcz" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.677849 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-mkskt" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.678040 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.680401 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.742362 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-58cb968fcd-5t5jk"] Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.744070 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-58cb968fcd-5t5jk" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.749209 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.755916 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7cbd488799-dtxcz"] Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.777171 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-58cb968fcd-5t5jk"] Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.811812 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-hcqk6"] Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.813546 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.815384 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-config\") pod \"48a013b4-3dbe-4944-b931-474ec989a214\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.815418 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-ovsdbserver-nb\") pod \"48a013b4-3dbe-4944-b931-474ec989a214\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.815488 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sbjcb\" (UniqueName: \"kubernetes.io/projected/48a013b4-3dbe-4944-b931-474ec989a214-kube-api-access-sbjcb\") pod \"48a013b4-3dbe-4944-b931-474ec989a214\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.815533 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-dns-svc\") pod \"48a013b4-3dbe-4944-b931-474ec989a214\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.815597 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-dns-swift-storage-0\") pod \"48a013b4-3dbe-4944-b931-474ec989a214\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.815649 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-ovsdbserver-sb\") pod \"48a013b4-3dbe-4944-b931-474ec989a214\" (UID: \"48a013b4-3dbe-4944-b931-474ec989a214\") " Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.815972 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/11097ba4-a9db-49eb-a685-fbb2a15e6d6a-logs\") pod \"barbican-worker-7cbd488799-dtxcz\" (UID: \"11097ba4-a9db-49eb-a685-fbb2a15e6d6a\") " pod="openstack/barbican-worker-7cbd488799-dtxcz" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.816007 4755 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptfg2\" (UniqueName: \"kubernetes.io/projected/11097ba4-a9db-49eb-a685-fbb2a15e6d6a-kube-api-access-ptfg2\") pod \"barbican-worker-7cbd488799-dtxcz\" (UID: \"11097ba4-a9db-49eb-a685-fbb2a15e6d6a\") " pod="openstack/barbican-worker-7cbd488799-dtxcz" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.816046 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11097ba4-a9db-49eb-a685-fbb2a15e6d6a-config-data\") pod \"barbican-worker-7cbd488799-dtxcz\" (UID: \"11097ba4-a9db-49eb-a685-fbb2a15e6d6a\") " pod="openstack/barbican-worker-7cbd488799-dtxcz" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.816082 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/11097ba4-a9db-49eb-a685-fbb2a15e6d6a-config-data-custom\") pod \"barbican-worker-7cbd488799-dtxcz\" (UID: \"11097ba4-a9db-49eb-a685-fbb2a15e6d6a\") " pod="openstack/barbican-worker-7cbd488799-dtxcz" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.816105 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11097ba4-a9db-49eb-a685-fbb2a15e6d6a-combined-ca-bundle\") pod \"barbican-worker-7cbd488799-dtxcz\" (UID: \"11097ba4-a9db-49eb-a685-fbb2a15e6d6a\") " pod="openstack/barbican-worker-7cbd488799-dtxcz" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.820863 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-hcqk6"] Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.825161 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48a013b4-3dbe-4944-b931-474ec989a214-kube-api-access-sbjcb" (OuterVolumeSpecName: "kube-api-access-sbjcb") pod "48a013b4-3dbe-4944-b931-474ec989a214" (UID: "48a013b4-3dbe-4944-b931-474ec989a214"). InnerVolumeSpecName "kube-api-access-sbjcb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.918401 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dw8lv\" (UniqueName: \"kubernetes.io/projected/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-kube-api-access-dw8lv\") pod \"dnsmasq-dns-848cf88cfc-hcqk6\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.918444 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/11097ba4-a9db-49eb-a685-fbb2a15e6d6a-config-data-custom\") pod \"barbican-worker-7cbd488799-dtxcz\" (UID: \"11097ba4-a9db-49eb-a685-fbb2a15e6d6a\") " pod="openstack/barbican-worker-7cbd488799-dtxcz" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.918467 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3838c6a5-bf59-4820-a400-0877e82598f6-combined-ca-bundle\") pod \"barbican-keystone-listener-58cb968fcd-5t5jk\" (UID: \"3838c6a5-bf59-4820-a400-0877e82598f6\") " pod="openstack/barbican-keystone-listener-58cb968fcd-5t5jk" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.918489 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11097ba4-a9db-49eb-a685-fbb2a15e6d6a-combined-ca-bundle\") pod \"barbican-worker-7cbd488799-dtxcz\" (UID: \"11097ba4-a9db-49eb-a685-fbb2a15e6d6a\") " pod="openstack/barbican-worker-7cbd488799-dtxcz" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.918532 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-hcqk6\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.918555 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6wf8\" (UniqueName: \"kubernetes.io/projected/3838c6a5-bf59-4820-a400-0877e82598f6-kube-api-access-k6wf8\") pod \"barbican-keystone-listener-58cb968fcd-5t5jk\" (UID: \"3838c6a5-bf59-4820-a400-0877e82598f6\") " pod="openstack/barbican-keystone-listener-58cb968fcd-5t5jk" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.918572 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3838c6a5-bf59-4820-a400-0877e82598f6-config-data\") pod \"barbican-keystone-listener-58cb968fcd-5t5jk\" (UID: \"3838c6a5-bf59-4820-a400-0877e82598f6\") " pod="openstack/barbican-keystone-listener-58cb968fcd-5t5jk" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.918589 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-hcqk6\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.918622 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3838c6a5-bf59-4820-a400-0877e82598f6-config-data-custom\") pod \"barbican-keystone-listener-58cb968fcd-5t5jk\" (UID: \"3838c6a5-bf59-4820-a400-0877e82598f6\") " pod="openstack/barbican-keystone-listener-58cb968fcd-5t5jk" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.918669 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3838c6a5-bf59-4820-a400-0877e82598f6-logs\") pod \"barbican-keystone-listener-58cb968fcd-5t5jk\" (UID: \"3838c6a5-bf59-4820-a400-0877e82598f6\") " pod="openstack/barbican-keystone-listener-58cb968fcd-5t5jk" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.918684 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-config\") pod \"dnsmasq-dns-848cf88cfc-hcqk6\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.918720 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/11097ba4-a9db-49eb-a685-fbb2a15e6d6a-logs\") pod \"barbican-worker-7cbd488799-dtxcz\" (UID: \"11097ba4-a9db-49eb-a685-fbb2a15e6d6a\") " pod="openstack/barbican-worker-7cbd488799-dtxcz" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.918752 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-hcqk6\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.918772 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptfg2\" (UniqueName: \"kubernetes.io/projected/11097ba4-a9db-49eb-a685-fbb2a15e6d6a-kube-api-access-ptfg2\") pod \"barbican-worker-7cbd488799-dtxcz\" (UID: \"11097ba4-a9db-49eb-a685-fbb2a15e6d6a\") " pod="openstack/barbican-worker-7cbd488799-dtxcz" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.918809 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11097ba4-a9db-49eb-a685-fbb2a15e6d6a-config-data\") pod \"barbican-worker-7cbd488799-dtxcz\" (UID: \"11097ba4-a9db-49eb-a685-fbb2a15e6d6a\") " pod="openstack/barbican-worker-7cbd488799-dtxcz" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.918828 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-hcqk6\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.918884 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sbjcb\" (UniqueName: \"kubernetes.io/projected/48a013b4-3dbe-4944-b931-474ec989a214-kube-api-access-sbjcb\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.921068 4755 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/barbican-api-66999cb498-vs49f"] Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.922791 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-66999cb498-vs49f" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.923661 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/11097ba4-a9db-49eb-a685-fbb2a15e6d6a-logs\") pod \"barbican-worker-7cbd488799-dtxcz\" (UID: \"11097ba4-a9db-49eb-a685-fbb2a15e6d6a\") " pod="openstack/barbican-worker-7cbd488799-dtxcz" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.930462 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.936708 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "48a013b4-3dbe-4944-b931-474ec989a214" (UID: "48a013b4-3dbe-4944-b931-474ec989a214"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.941316 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/11097ba4-a9db-49eb-a685-fbb2a15e6d6a-config-data-custom\") pod \"barbican-worker-7cbd488799-dtxcz\" (UID: \"11097ba4-a9db-49eb-a685-fbb2a15e6d6a\") " pod="openstack/barbican-worker-7cbd488799-dtxcz" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.941691 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11097ba4-a9db-49eb-a685-fbb2a15e6d6a-config-data\") pod \"barbican-worker-7cbd488799-dtxcz\" (UID: \"11097ba4-a9db-49eb-a685-fbb2a15e6d6a\") " pod="openstack/barbican-worker-7cbd488799-dtxcz" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.948395 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11097ba4-a9db-49eb-a685-fbb2a15e6d6a-combined-ca-bundle\") pod \"barbican-worker-7cbd488799-dtxcz\" (UID: \"11097ba4-a9db-49eb-a685-fbb2a15e6d6a\") " pod="openstack/barbican-worker-7cbd488799-dtxcz" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.950199 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "48a013b4-3dbe-4944-b931-474ec989a214" (UID: "48a013b4-3dbe-4944-b931-474ec989a214"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.953513 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-config" (OuterVolumeSpecName: "config") pod "48a013b4-3dbe-4944-b931-474ec989a214" (UID: "48a013b4-3dbe-4944-b931-474ec989a214"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.955541 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptfg2\" (UniqueName: \"kubernetes.io/projected/11097ba4-a9db-49eb-a685-fbb2a15e6d6a-kube-api-access-ptfg2\") pod \"barbican-worker-7cbd488799-dtxcz\" (UID: \"11097ba4-a9db-49eb-a685-fbb2a15e6d6a\") " pod="openstack/barbican-worker-7cbd488799-dtxcz" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.961231 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "48a013b4-3dbe-4944-b931-474ec989a214" (UID: "48a013b4-3dbe-4944-b931-474ec989a214"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:54:18 crc kubenswrapper[4755]: I0202 22:54:18.966044 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-66999cb498-vs49f"] Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.015555 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "48a013b4-3dbe-4944-b931-474ec989a214" (UID: "48a013b4-3dbe-4944-b931-474ec989a214"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.020755 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3838c6a5-bf59-4820-a400-0877e82598f6-config-data-custom\") pod \"barbican-keystone-listener-58cb968fcd-5t5jk\" (UID: \"3838c6a5-bf59-4820-a400-0877e82598f6\") " pod="openstack/barbican-keystone-listener-58cb968fcd-5t5jk" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.020829 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26b37ea7-b9d1-4db2-976b-8d40431c46e2-combined-ca-bundle\") pod \"barbican-api-66999cb498-vs49f\" (UID: \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\") " pod="openstack/barbican-api-66999cb498-vs49f" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.020856 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3838c6a5-bf59-4820-a400-0877e82598f6-logs\") pod \"barbican-keystone-listener-58cb968fcd-5t5jk\" (UID: \"3838c6a5-bf59-4820-a400-0877e82598f6\") " pod="openstack/barbican-keystone-listener-58cb968fcd-5t5jk" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.020876 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-config\") pod \"dnsmasq-dns-848cf88cfc-hcqk6\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.020897 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/26b37ea7-b9d1-4db2-976b-8d40431c46e2-config-data-custom\") pod \"barbican-api-66999cb498-vs49f\" (UID: \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\") " pod="openstack/barbican-api-66999cb498-vs49f" Feb 02 22:54:19 crc 
kubenswrapper[4755]: I0202 22:54:19.020935 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-hcqk6\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.020975 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2b24k\" (UniqueName: \"kubernetes.io/projected/26b37ea7-b9d1-4db2-976b-8d40431c46e2-kube-api-access-2b24k\") pod \"barbican-api-66999cb498-vs49f\" (UID: \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\") " pod="openstack/barbican-api-66999cb498-vs49f" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.020997 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-hcqk6\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.021017 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26b37ea7-b9d1-4db2-976b-8d40431c46e2-logs\") pod \"barbican-api-66999cb498-vs49f\" (UID: \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\") " pod="openstack/barbican-api-66999cb498-vs49f" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.021048 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dw8lv\" (UniqueName: \"kubernetes.io/projected/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-kube-api-access-dw8lv\") pod \"dnsmasq-dns-848cf88cfc-hcqk6\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.021067 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3838c6a5-bf59-4820-a400-0877e82598f6-combined-ca-bundle\") pod \"barbican-keystone-listener-58cb968fcd-5t5jk\" (UID: \"3838c6a5-bf59-4820-a400-0877e82598f6\") " pod="openstack/barbican-keystone-listener-58cb968fcd-5t5jk" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.021115 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26b37ea7-b9d1-4db2-976b-8d40431c46e2-config-data\") pod \"barbican-api-66999cb498-vs49f\" (UID: \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\") " pod="openstack/barbican-api-66999cb498-vs49f" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.021136 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-hcqk6\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.021159 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6wf8\" (UniqueName: \"kubernetes.io/projected/3838c6a5-bf59-4820-a400-0877e82598f6-kube-api-access-k6wf8\") pod \"barbican-keystone-listener-58cb968fcd-5t5jk\" (UID: 
\"3838c6a5-bf59-4820-a400-0877e82598f6\") " pod="openstack/barbican-keystone-listener-58cb968fcd-5t5jk" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.021177 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3838c6a5-bf59-4820-a400-0877e82598f6-config-data\") pod \"barbican-keystone-listener-58cb968fcd-5t5jk\" (UID: \"3838c6a5-bf59-4820-a400-0877e82598f6\") " pod="openstack/barbican-keystone-listener-58cb968fcd-5t5jk" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.021191 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-hcqk6\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.021240 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.021253 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.021263 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.021271 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.021280 4755 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/48a013b4-3dbe-4944-b931-474ec989a214-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.021974 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-hcqk6\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.024822 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-hcqk6\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.025482 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-config\") pod \"dnsmasq-dns-848cf88cfc-hcqk6\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.025788 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3838c6a5-bf59-4820-a400-0877e82598f6-logs\") pod 
\"barbican-keystone-listener-58cb968fcd-5t5jk\" (UID: \"3838c6a5-bf59-4820-a400-0877e82598f6\") " pod="openstack/barbican-keystone-listener-58cb968fcd-5t5jk" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.025937 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-hcqk6\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.026545 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-hcqk6\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.030807 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3838c6a5-bf59-4820-a400-0877e82598f6-combined-ca-bundle\") pod \"barbican-keystone-listener-58cb968fcd-5t5jk\" (UID: \"3838c6a5-bf59-4820-a400-0877e82598f6\") " pod="openstack/barbican-keystone-listener-58cb968fcd-5t5jk" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.034384 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7cbd488799-dtxcz" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.037918 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3838c6a5-bf59-4820-a400-0877e82598f6-config-data\") pod \"barbican-keystone-listener-58cb968fcd-5t5jk\" (UID: \"3838c6a5-bf59-4820-a400-0877e82598f6\") " pod="openstack/barbican-keystone-listener-58cb968fcd-5t5jk" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.043372 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3838c6a5-bf59-4820-a400-0877e82598f6-config-data-custom\") pod \"barbican-keystone-listener-58cb968fcd-5t5jk\" (UID: \"3838c6a5-bf59-4820-a400-0877e82598f6\") " pod="openstack/barbican-keystone-listener-58cb968fcd-5t5jk" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.047518 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6wf8\" (UniqueName: \"kubernetes.io/projected/3838c6a5-bf59-4820-a400-0877e82598f6-kube-api-access-k6wf8\") pod \"barbican-keystone-listener-58cb968fcd-5t5jk\" (UID: \"3838c6a5-bf59-4820-a400-0877e82598f6\") " pod="openstack/barbican-keystone-listener-58cb968fcd-5t5jk" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.051052 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dw8lv\" (UniqueName: \"kubernetes.io/projected/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-kube-api-access-dw8lv\") pod \"dnsmasq-dns-848cf88cfc-hcqk6\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.091426 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-58cb968fcd-5t5jk" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.126116 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26b37ea7-b9d1-4db2-976b-8d40431c46e2-combined-ca-bundle\") pod \"barbican-api-66999cb498-vs49f\" (UID: \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\") " pod="openstack/barbican-api-66999cb498-vs49f" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.126330 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/26b37ea7-b9d1-4db2-976b-8d40431c46e2-config-data-custom\") pod \"barbican-api-66999cb498-vs49f\" (UID: \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\") " pod="openstack/barbican-api-66999cb498-vs49f" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.126438 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2b24k\" (UniqueName: \"kubernetes.io/projected/26b37ea7-b9d1-4db2-976b-8d40431c46e2-kube-api-access-2b24k\") pod \"barbican-api-66999cb498-vs49f\" (UID: \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\") " pod="openstack/barbican-api-66999cb498-vs49f" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.126539 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26b37ea7-b9d1-4db2-976b-8d40431c46e2-logs\") pod \"barbican-api-66999cb498-vs49f\" (UID: \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\") " pod="openstack/barbican-api-66999cb498-vs49f" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.126647 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26b37ea7-b9d1-4db2-976b-8d40431c46e2-config-data\") pod \"barbican-api-66999cb498-vs49f\" (UID: \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\") " pod="openstack/barbican-api-66999cb498-vs49f" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.133305 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26b37ea7-b9d1-4db2-976b-8d40431c46e2-logs\") pod \"barbican-api-66999cb498-vs49f\" (UID: \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\") " pod="openstack/barbican-api-66999cb498-vs49f" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.134378 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26b37ea7-b9d1-4db2-976b-8d40431c46e2-config-data\") pod \"barbican-api-66999cb498-vs49f\" (UID: \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\") " pod="openstack/barbican-api-66999cb498-vs49f" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.135218 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26b37ea7-b9d1-4db2-976b-8d40431c46e2-combined-ca-bundle\") pod \"barbican-api-66999cb498-vs49f\" (UID: \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\") " pod="openstack/barbican-api-66999cb498-vs49f" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.137477 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/26b37ea7-b9d1-4db2-976b-8d40431c46e2-config-data-custom\") pod \"barbican-api-66999cb498-vs49f\" (UID: \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\") " pod="openstack/barbican-api-66999cb498-vs49f" Feb 02 
22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.147317 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.152620 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2b24k\" (UniqueName: \"kubernetes.io/projected/26b37ea7-b9d1-4db2-976b-8d40431c46e2-kube-api-access-2b24k\") pod \"barbican-api-66999cb498-vs49f\" (UID: \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\") " pod="openstack/barbican-api-66999cb498-vs49f" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.250993 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-66999cb498-vs49f" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.412812 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-f565458cd-6bkv6" event={"ID":"541bac40-f0e1-4d39-9595-447b0f5b0c26","Type":"ContainerStarted","Data":"13111e349ce195a423de57906e5165939951be8c14ba984daa6520101fc01763"} Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.414840 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.415112 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-f565458cd-6bkv6" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.425374 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-pwclg" event={"ID":"0403cc43-6199-4e95-b427-c4f268d8049a","Type":"ContainerStarted","Data":"16855b8ef35a76bebbf6699613df39982b91b3371c8f70a65ccc74159a20b7f4"} Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.452668 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-f565458cd-6bkv6" podStartSLOduration=6.452652255 podStartE2EDuration="6.452652255s" podCreationTimestamp="2026-02-02 22:54:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:19.446955526 +0000 UTC m=+1215.138175852" watchObservedRunningTime="2026-02-02 22:54:19.452652255 +0000 UTC m=+1215.143872581" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.471051 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5bdd8df796-zxkxp" event={"ID":"58da41fb-8aca-4566-a2c2-a13c57ee04ce","Type":"ContainerStarted","Data":"4573734855abe19a214fd5f7e283449e2f2057dc71c168302c84b4d45de85d8a"} Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.472252 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.472294 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.477423 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-pwclg" podStartSLOduration=4.773725075 podStartE2EDuration="50.477401132s" podCreationTimestamp="2026-02-02 22:53:29 +0000 UTC" firstStartedPulling="2026-02-02 22:53:31.543001486 +0000 UTC m=+1167.234221812" lastFinishedPulling="2026-02-02 22:54:17.246677543 +0000 UTC m=+1212.937897869" observedRunningTime="2026-02-02 22:54:19.465362308 +0000 UTC m=+1215.156582634" watchObservedRunningTime="2026-02-02 22:54:19.477401132 +0000 
UTC m=+1215.168621458" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.480367 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7cb7f594d6-76xlv" event={"ID":"9ba7348e-74b2-4840-8d02-8bfa3c89c483","Type":"ContainerStarted","Data":"41d409dd233225ee657a0896219733a3901ada1c03d9e1d9cd08d457826ccf6b"} Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.484889 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.485094 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-72bxr" event={"ID":"48a013b4-3dbe-4944-b931-474ec989a214","Type":"ContainerDied","Data":"17367255b0464d66a939bf540ea1b0c83e8f701e6ba3a276bbcc53a3bf22d495"} Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.485160 4755 scope.go:117] "RemoveContainer" containerID="653749eac1c3ef1b80609303926d7bfeb7f12bbb92c79f10be55d5a827c578f2" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.500330 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-5bdd8df796-zxkxp" podStartSLOduration=6.500315189 podStartE2EDuration="6.500315189s" podCreationTimestamp="2026-02-02 22:54:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:19.499467125 +0000 UTC m=+1215.190687441" watchObservedRunningTime="2026-02-02 22:54:19.500315189 +0000 UTC m=+1215.191535515" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.526814 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-72bxr"] Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.534484 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-72bxr"] Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.546647 4755 scope.go:117] "RemoveContainer" containerID="645ed23646e73aeaae2545c06f3362e0c789f0688b3648fd5c2ab4c1fb64862d" Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.730799 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-58cb968fcd-5t5jk"] Feb 02 22:54:19 crc kubenswrapper[4755]: I0202 22:54:19.989484 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-hcqk6"] Feb 02 22:54:20 crc kubenswrapper[4755]: W0202 22:54:20.001377 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod11097ba4_a9db_49eb_a685_fbb2a15e6d6a.slice/crio-1baf281125f9d86ad1624197352f9429a23b4b23ee3e6372a6478dd49612a770 WatchSource:0}: Error finding container 1baf281125f9d86ad1624197352f9429a23b4b23ee3e6372a6478dd49612a770: Status 404 returned error can't find the container with id 1baf281125f9d86ad1624197352f9429a23b4b23ee3e6372a6478dd49612a770 Feb 02 22:54:20 crc kubenswrapper[4755]: I0202 22:54:20.006375 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7cbd488799-dtxcz"] Feb 02 22:54:20 crc kubenswrapper[4755]: I0202 22:54:20.080239 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-66999cb498-vs49f"] Feb 02 22:54:20 crc kubenswrapper[4755]: I0202 22:54:20.508928 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7cbd488799-dtxcz" 
event={"ID":"11097ba4-a9db-49eb-a685-fbb2a15e6d6a","Type":"ContainerStarted","Data":"1baf281125f9d86ad1624197352f9429a23b4b23ee3e6372a6478dd49612a770"} Feb 02 22:54:20 crc kubenswrapper[4755]: I0202 22:54:20.523927 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" event={"ID":"f7cdcbab-c1c0-4984-bf8d-781715d75dc1","Type":"ContainerStarted","Data":"62798e56284ae033d247acc9aadbf85d6b9998aab71f92fdf9873bafbef2dd43"} Feb 02 22:54:20 crc kubenswrapper[4755]: I0202 22:54:20.534532 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-58cb968fcd-5t5jk" event={"ID":"3838c6a5-bf59-4820-a400-0877e82598f6","Type":"ContainerStarted","Data":"df555f3f1147d833fbef56f2fe71e73771ded23f1d76672b0963a14cc2eeb117"} Feb 02 22:54:20 crc kubenswrapper[4755]: I0202 22:54:20.536906 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-66999cb498-vs49f" event={"ID":"26b37ea7-b9d1-4db2-976b-8d40431c46e2","Type":"ContainerStarted","Data":"a24ab594892b4fcc80d70e06b80c1a35dacad0d4ae7d18eef01b7223e8f40ed0"} Feb 02 22:54:20 crc kubenswrapper[4755]: I0202 22:54:20.536934 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-66999cb498-vs49f" event={"ID":"26b37ea7-b9d1-4db2-976b-8d40431c46e2","Type":"ContainerStarted","Data":"2fd05ce15bb5124f452435130890773bea33dd305f52396efb4b43eb0cf0fb2a"} Feb 02 22:54:20 crc kubenswrapper[4755]: I0202 22:54:20.687172 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 02 22:54:20 crc kubenswrapper[4755]: I0202 22:54:20.687418 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 02 22:54:20 crc kubenswrapper[4755]: I0202 22:54:20.813854 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 02 22:54:20 crc kubenswrapper[4755]: I0202 22:54:20.814090 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.100273 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48a013b4-3dbe-4944-b931-474ec989a214" path="/var/lib/kubelet/pods/48a013b4-3dbe-4944-b931-474ec989a214/volumes" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.215234 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-75f5dc8786-9gzp2"] Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.218155 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.223251 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.223271 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.230749 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-75f5dc8786-9gzp2"] Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.296167 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff86278c-f2b5-405a-a79c-f192d8aba1d5-public-tls-certs\") pod \"barbican-api-75f5dc8786-9gzp2\" (UID: \"ff86278c-f2b5-405a-a79c-f192d8aba1d5\") " pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.296218 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff86278c-f2b5-405a-a79c-f192d8aba1d5-internal-tls-certs\") pod \"barbican-api-75f5dc8786-9gzp2\" (UID: \"ff86278c-f2b5-405a-a79c-f192d8aba1d5\") " pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.296249 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vw49\" (UniqueName: \"kubernetes.io/projected/ff86278c-f2b5-405a-a79c-f192d8aba1d5-kube-api-access-2vw49\") pod \"barbican-api-75f5dc8786-9gzp2\" (UID: \"ff86278c-f2b5-405a-a79c-f192d8aba1d5\") " pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.296309 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff86278c-f2b5-405a-a79c-f192d8aba1d5-combined-ca-bundle\") pod \"barbican-api-75f5dc8786-9gzp2\" (UID: \"ff86278c-f2b5-405a-a79c-f192d8aba1d5\") " pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.296338 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff86278c-f2b5-405a-a79c-f192d8aba1d5-config-data-custom\") pod \"barbican-api-75f5dc8786-9gzp2\" (UID: \"ff86278c-f2b5-405a-a79c-f192d8aba1d5\") " pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.296362 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff86278c-f2b5-405a-a79c-f192d8aba1d5-config-data\") pod \"barbican-api-75f5dc8786-9gzp2\" (UID: \"ff86278c-f2b5-405a-a79c-f192d8aba1d5\") " pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.296461 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff86278c-f2b5-405a-a79c-f192d8aba1d5-logs\") pod \"barbican-api-75f5dc8786-9gzp2\" (UID: \"ff86278c-f2b5-405a-a79c-f192d8aba1d5\") " pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.364204 4755 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.364249 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.394099 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.398218 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff86278c-f2b5-405a-a79c-f192d8aba1d5-logs\") pod \"barbican-api-75f5dc8786-9gzp2\" (UID: \"ff86278c-f2b5-405a-a79c-f192d8aba1d5\") " pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.398270 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff86278c-f2b5-405a-a79c-f192d8aba1d5-public-tls-certs\") pod \"barbican-api-75f5dc8786-9gzp2\" (UID: \"ff86278c-f2b5-405a-a79c-f192d8aba1d5\") " pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.398296 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff86278c-f2b5-405a-a79c-f192d8aba1d5-internal-tls-certs\") pod \"barbican-api-75f5dc8786-9gzp2\" (UID: \"ff86278c-f2b5-405a-a79c-f192d8aba1d5\") " pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.398323 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vw49\" (UniqueName: \"kubernetes.io/projected/ff86278c-f2b5-405a-a79c-f192d8aba1d5-kube-api-access-2vw49\") pod \"barbican-api-75f5dc8786-9gzp2\" (UID: \"ff86278c-f2b5-405a-a79c-f192d8aba1d5\") " pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.398374 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff86278c-f2b5-405a-a79c-f192d8aba1d5-combined-ca-bundle\") pod \"barbican-api-75f5dc8786-9gzp2\" (UID: \"ff86278c-f2b5-405a-a79c-f192d8aba1d5\") " pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.398403 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff86278c-f2b5-405a-a79c-f192d8aba1d5-config-data-custom\") pod \"barbican-api-75f5dc8786-9gzp2\" (UID: \"ff86278c-f2b5-405a-a79c-f192d8aba1d5\") " pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.398426 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff86278c-f2b5-405a-a79c-f192d8aba1d5-config-data\") pod \"barbican-api-75f5dc8786-9gzp2\" (UID: \"ff86278c-f2b5-405a-a79c-f192d8aba1d5\") " pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.402327 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff86278c-f2b5-405a-a79c-f192d8aba1d5-logs\") pod \"barbican-api-75f5dc8786-9gzp2\" (UID: \"ff86278c-f2b5-405a-a79c-f192d8aba1d5\") " pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 
02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.404875 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.405312 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff86278c-f2b5-405a-a79c-f192d8aba1d5-public-tls-certs\") pod \"barbican-api-75f5dc8786-9gzp2\" (UID: \"ff86278c-f2b5-405a-a79c-f192d8aba1d5\") " pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.405370 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff86278c-f2b5-405a-a79c-f192d8aba1d5-internal-tls-certs\") pod \"barbican-api-75f5dc8786-9gzp2\" (UID: \"ff86278c-f2b5-405a-a79c-f192d8aba1d5\") " pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.417938 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vw49\" (UniqueName: \"kubernetes.io/projected/ff86278c-f2b5-405a-a79c-f192d8aba1d5-kube-api-access-2vw49\") pod \"barbican-api-75f5dc8786-9gzp2\" (UID: \"ff86278c-f2b5-405a-a79c-f192d8aba1d5\") " pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.421069 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff86278c-f2b5-405a-a79c-f192d8aba1d5-config-data\") pod \"barbican-api-75f5dc8786-9gzp2\" (UID: \"ff86278c-f2b5-405a-a79c-f192d8aba1d5\") " pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.421588 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff86278c-f2b5-405a-a79c-f192d8aba1d5-combined-ca-bundle\") pod \"barbican-api-75f5dc8786-9gzp2\" (UID: \"ff86278c-f2b5-405a-a79c-f192d8aba1d5\") " pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.426373 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ff86278c-f2b5-405a-a79c-f192d8aba1d5-config-data-custom\") pod \"barbican-api-75f5dc8786-9gzp2\" (UID: \"ff86278c-f2b5-405a-a79c-f192d8aba1d5\") " pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.548988 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.552748 4755 generic.go:334] "Generic (PLEG): container finished" podID="f7cdcbab-c1c0-4984-bf8d-781715d75dc1" containerID="602424169d3a36beccc90897c6afb5af8dbbbfd33ea287063a96a80fbfd41c7b" exitCode=0 Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.552821 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" event={"ID":"f7cdcbab-c1c0-4984-bf8d-781715d75dc1","Type":"ContainerDied","Data":"602424169d3a36beccc90897c6afb5af8dbbbfd33ea287063a96a80fbfd41c7b"} Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.566760 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-66999cb498-vs49f" event={"ID":"26b37ea7-b9d1-4db2-976b-8d40431c46e2","Type":"ContainerStarted","Data":"a8e555a10cddffc2a26ace3dd724cfaa5c321dc6b36d460b77d3e176effd042a"} Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.568120 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.568151 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.568162 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.568170 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 02 22:54:21 crc kubenswrapper[4755]: I0202 22:54:21.599818 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-66999cb498-vs49f" podStartSLOduration=3.599800263 podStartE2EDuration="3.599800263s" podCreationTimestamp="2026-02-02 22:54:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:21.589108106 +0000 UTC m=+1217.280328442" watchObservedRunningTime="2026-02-02 22:54:21.599800263 +0000 UTC m=+1217.291020589" Feb 02 22:54:22 crc kubenswrapper[4755]: I0202 22:54:22.593223 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-66999cb498-vs49f" Feb 02 22:54:22 crc kubenswrapper[4755]: I0202 22:54:22.593718 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-66999cb498-vs49f" Feb 02 22:54:23 crc kubenswrapper[4755]: I0202 22:54:23.207838 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-75f5dc8786-9gzp2"] Feb 02 22:54:23 crc kubenswrapper[4755]: I0202 22:54:23.617933 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-75zwb" event={"ID":"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe","Type":"ContainerStarted","Data":"446d14daa03e8b6b461fd6b5532b4b64048f99b269174b061d4c46666f735e82"} Feb 02 22:54:23 crc kubenswrapper[4755]: I0202 22:54:23.630954 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7cbd488799-dtxcz" event={"ID":"11097ba4-a9db-49eb-a685-fbb2a15e6d6a","Type":"ContainerStarted","Data":"8e701973df9aca595cf1f2ef25874ac3df137371658dccfbfc52f4a8c3d08b76"} Feb 02 22:54:23 crc kubenswrapper[4755]: I0202 22:54:23.631145 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/barbican-worker-7cbd488799-dtxcz" event={"ID":"11097ba4-a9db-49eb-a685-fbb2a15e6d6a","Type":"ContainerStarted","Data":"8fda619566cf59f2016891d30755edc0504fd5f337f05684d2c38eeb64027aa9"} Feb 02 22:54:23 crc kubenswrapper[4755]: I0202 22:54:23.643565 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" event={"ID":"f7cdcbab-c1c0-4984-bf8d-781715d75dc1","Type":"ContainerStarted","Data":"d1d977b08beff48ba401e6a4010ff3ce99c2011b6dcea05e3f24572fd12dfb4f"} Feb 02 22:54:23 crc kubenswrapper[4755]: I0202 22:54:23.644429 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:23 crc kubenswrapper[4755]: I0202 22:54:23.653358 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-db-sync-75zwb" podStartSLOduration=2.857335598 podStartE2EDuration="54.653338461s" podCreationTimestamp="2026-02-02 22:53:29 +0000 UTC" firstStartedPulling="2026-02-02 22:53:31.512320433 +0000 UTC m=+1167.203540769" lastFinishedPulling="2026-02-02 22:54:23.308323306 +0000 UTC m=+1218.999543632" observedRunningTime="2026-02-02 22:54:23.641099291 +0000 UTC m=+1219.332319617" watchObservedRunningTime="2026-02-02 22:54:23.653338461 +0000 UTC m=+1219.344558787" Feb 02 22:54:23 crc kubenswrapper[4755]: I0202 22:54:23.664703 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-75f5dc8786-9gzp2" event={"ID":"ff86278c-f2b5-405a-a79c-f192d8aba1d5","Type":"ContainerStarted","Data":"f603c337a03ce2d7e34d90365b6e8862bf3b6c5715a188149136b029c1540cdc"} Feb 02 22:54:23 crc kubenswrapper[4755]: I0202 22:54:23.668499 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-75f5dc8786-9gzp2" event={"ID":"ff86278c-f2b5-405a-a79c-f192d8aba1d5","Type":"ContainerStarted","Data":"0fef8f03fcda65a1a5ff74eeb98227477f8bd87667d65babc07aa93f92b56f33"} Feb 02 22:54:23 crc kubenswrapper[4755]: I0202 22:54:23.668525 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-58cb968fcd-5t5jk" event={"ID":"3838c6a5-bf59-4820-a400-0877e82598f6","Type":"ContainerStarted","Data":"800d2b144bb1246be5d06d70c7d43058ed61fb48500610629aa8ece045b17993"} Feb 02 22:54:23 crc kubenswrapper[4755]: I0202 22:54:23.668538 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-58cb968fcd-5t5jk" event={"ID":"3838c6a5-bf59-4820-a400-0877e82598f6","Type":"ContainerStarted","Data":"296797c02564a9301c0b3ce1630267bad55fe6508e518b9a8463d0b5e3173adb"} Feb 02 22:54:23 crc kubenswrapper[4755]: I0202 22:54:23.675158 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-7cbd488799-dtxcz" podStartSLOduration=3.257686278 podStartE2EDuration="5.675133726s" podCreationTimestamp="2026-02-02 22:54:18 +0000 UTC" firstStartedPulling="2026-02-02 22:54:20.007354894 +0000 UTC m=+1215.698575220" lastFinishedPulling="2026-02-02 22:54:22.424802342 +0000 UTC m=+1218.116022668" observedRunningTime="2026-02-02 22:54:23.668274986 +0000 UTC m=+1219.359495312" watchObservedRunningTime="2026-02-02 22:54:23.675133726 +0000 UTC m=+1219.366354052" Feb 02 22:54:23 crc kubenswrapper[4755]: I0202 22:54:23.700028 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-58cb968fcd-5t5jk" podStartSLOduration=3.028939803 podStartE2EDuration="5.699977626s" podCreationTimestamp="2026-02-02 22:54:18 +0000 UTC" 
firstStartedPulling="2026-02-02 22:54:19.752808713 +0000 UTC m=+1215.444029039" lastFinishedPulling="2026-02-02 22:54:22.423846536 +0000 UTC m=+1218.115066862" observedRunningTime="2026-02-02 22:54:23.695608465 +0000 UTC m=+1219.386828801" watchObservedRunningTime="2026-02-02 22:54:23.699977626 +0000 UTC m=+1219.391197952" Feb 02 22:54:23 crc kubenswrapper[4755]: I0202 22:54:23.729662 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" podStartSLOduration=5.72964185 podStartE2EDuration="5.72964185s" podCreationTimestamp="2026-02-02 22:54:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:23.724269371 +0000 UTC m=+1219.415489697" watchObservedRunningTime="2026-02-02 22:54:23.72964185 +0000 UTC m=+1219.420862176" Feb 02 22:54:24 crc kubenswrapper[4755]: I0202 22:54:24.676099 4755 generic.go:334] "Generic (PLEG): container finished" podID="0403cc43-6199-4e95-b427-c4f268d8049a" containerID="16855b8ef35a76bebbf6699613df39982b91b3371c8f70a65ccc74159a20b7f4" exitCode=0 Feb 02 22:54:24 crc kubenswrapper[4755]: I0202 22:54:24.676390 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-pwclg" event={"ID":"0403cc43-6199-4e95-b427-c4f268d8049a","Type":"ContainerDied","Data":"16855b8ef35a76bebbf6699613df39982b91b3371c8f70a65ccc74159a20b7f4"} Feb 02 22:54:24 crc kubenswrapper[4755]: I0202 22:54:24.681077 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-75f5dc8786-9gzp2" event={"ID":"ff86278c-f2b5-405a-a79c-f192d8aba1d5","Type":"ContainerStarted","Data":"9e19036314a26b05b3a6a64e44473a9016f05813a023e5111568627c62373962"} Feb 02 22:54:24 crc kubenswrapper[4755]: I0202 22:54:24.681531 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:24 crc kubenswrapper[4755]: I0202 22:54:24.681561 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:24 crc kubenswrapper[4755]: I0202 22:54:24.712169 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-75f5dc8786-9gzp2" podStartSLOduration=3.712152725 podStartE2EDuration="3.712152725s" podCreationTimestamp="2026-02-02 22:54:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:24.709630885 +0000 UTC m=+1220.400851211" watchObservedRunningTime="2026-02-02 22:54:24.712152725 +0000 UTC m=+1220.403373051" Feb 02 22:54:25 crc kubenswrapper[4755]: I0202 22:54:25.594792 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 02 22:54:25 crc kubenswrapper[4755]: I0202 22:54:25.595197 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 02 22:54:25 crc kubenswrapper[4755]: I0202 22:54:25.595336 4755 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 22:54:25 crc kubenswrapper[4755]: I0202 22:54:25.595377 4755 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 22:54:25 crc kubenswrapper[4755]: I0202 22:54:25.611257 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 02 22:54:25 crc 
kubenswrapper[4755]: I0202 22:54:25.617377 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 02 22:54:27 crc kubenswrapper[4755]: I0202 22:54:27.763064 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-66999cb498-vs49f" podUID="26b37ea7-b9d1-4db2-976b-8d40431c46e2" containerName="barbican-api" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 22:54:27 crc kubenswrapper[4755]: I0202 22:54:27.829695 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-85c9cbd9b8-2tlmh" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.103855 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-96d6dcbbf-js6bv"] Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.104080 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-96d6dcbbf-js6bv" podUID="dd8f8c90-543e-4125-9a9b-8c33e75c75ca" containerName="neutron-api" containerID="cri-o://b78c8e6a497ccc37d0e11270409e7a0aab7e4315d3b7ea3284b87340b6a379c5" gracePeriod=30 Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.104765 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-96d6dcbbf-js6bv" podUID="dd8f8c90-543e-4125-9a9b-8c33e75c75ca" containerName="neutron-httpd" containerID="cri-o://cd8dba955bdd475e250cf9ac3b118aec4b6716623e5bb9820f4b0d1ce85596fc" gracePeriod=30 Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.118204 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-96d6dcbbf-js6bv" podUID="dd8f8c90-543e-4125-9a9b-8c33e75c75ca" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.174:9696/\": EOF" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.140719 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-d7cb5dffc-4r8bd"] Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.145085 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.155641 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-d7cb5dffc-4r8bd"] Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.255618 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4n9g5\" (UniqueName: \"kubernetes.io/projected/0b81f473-96f7-4d5c-9695-cac22c344ed5-kube-api-access-4n9g5\") pod \"neutron-d7cb5dffc-4r8bd\" (UID: \"0b81f473-96f7-4d5c-9695-cac22c344ed5\") " pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.255654 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/0b81f473-96f7-4d5c-9695-cac22c344ed5-config\") pod \"neutron-d7cb5dffc-4r8bd\" (UID: \"0b81f473-96f7-4d5c-9695-cac22c344ed5\") " pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.255696 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b81f473-96f7-4d5c-9695-cac22c344ed5-public-tls-certs\") pod \"neutron-d7cb5dffc-4r8bd\" (UID: \"0b81f473-96f7-4d5c-9695-cac22c344ed5\") " pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.255747 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b81f473-96f7-4d5c-9695-cac22c344ed5-combined-ca-bundle\") pod \"neutron-d7cb5dffc-4r8bd\" (UID: \"0b81f473-96f7-4d5c-9695-cac22c344ed5\") " pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.255795 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b81f473-96f7-4d5c-9695-cac22c344ed5-ovndb-tls-certs\") pod \"neutron-d7cb5dffc-4r8bd\" (UID: \"0b81f473-96f7-4d5c-9695-cac22c344ed5\") " pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.255840 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/0b81f473-96f7-4d5c-9695-cac22c344ed5-httpd-config\") pod \"neutron-d7cb5dffc-4r8bd\" (UID: \"0b81f473-96f7-4d5c-9695-cac22c344ed5\") " pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.255874 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b81f473-96f7-4d5c-9695-cac22c344ed5-internal-tls-certs\") pod \"neutron-d7cb5dffc-4r8bd\" (UID: \"0b81f473-96f7-4d5c-9695-cac22c344ed5\") " pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.357796 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b81f473-96f7-4d5c-9695-cac22c344ed5-ovndb-tls-certs\") pod \"neutron-d7cb5dffc-4r8bd\" (UID: \"0b81f473-96f7-4d5c-9695-cac22c344ed5\") " pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.357906 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" 
(UniqueName: \"kubernetes.io/secret/0b81f473-96f7-4d5c-9695-cac22c344ed5-httpd-config\") pod \"neutron-d7cb5dffc-4r8bd\" (UID: \"0b81f473-96f7-4d5c-9695-cac22c344ed5\") " pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.358957 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b81f473-96f7-4d5c-9695-cac22c344ed5-internal-tls-certs\") pod \"neutron-d7cb5dffc-4r8bd\" (UID: \"0b81f473-96f7-4d5c-9695-cac22c344ed5\") " pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.359095 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4n9g5\" (UniqueName: \"kubernetes.io/projected/0b81f473-96f7-4d5c-9695-cac22c344ed5-kube-api-access-4n9g5\") pod \"neutron-d7cb5dffc-4r8bd\" (UID: \"0b81f473-96f7-4d5c-9695-cac22c344ed5\") " pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.359126 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/0b81f473-96f7-4d5c-9695-cac22c344ed5-config\") pod \"neutron-d7cb5dffc-4r8bd\" (UID: \"0b81f473-96f7-4d5c-9695-cac22c344ed5\") " pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.359192 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b81f473-96f7-4d5c-9695-cac22c344ed5-public-tls-certs\") pod \"neutron-d7cb5dffc-4r8bd\" (UID: \"0b81f473-96f7-4d5c-9695-cac22c344ed5\") " pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.359215 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b81f473-96f7-4d5c-9695-cac22c344ed5-combined-ca-bundle\") pod \"neutron-d7cb5dffc-4r8bd\" (UID: \"0b81f473-96f7-4d5c-9695-cac22c344ed5\") " pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.376378 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b81f473-96f7-4d5c-9695-cac22c344ed5-ovndb-tls-certs\") pod \"neutron-d7cb5dffc-4r8bd\" (UID: \"0b81f473-96f7-4d5c-9695-cac22c344ed5\") " pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.377265 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b81f473-96f7-4d5c-9695-cac22c344ed5-public-tls-certs\") pod \"neutron-d7cb5dffc-4r8bd\" (UID: \"0b81f473-96f7-4d5c-9695-cac22c344ed5\") " pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.383137 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b81f473-96f7-4d5c-9695-cac22c344ed5-combined-ca-bundle\") pod \"neutron-d7cb5dffc-4r8bd\" (UID: \"0b81f473-96f7-4d5c-9695-cac22c344ed5\") " pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.388644 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4n9g5\" (UniqueName: \"kubernetes.io/projected/0b81f473-96f7-4d5c-9695-cac22c344ed5-kube-api-access-4n9g5\") pod \"neutron-d7cb5dffc-4r8bd\" (UID: 
\"0b81f473-96f7-4d5c-9695-cac22c344ed5\") " pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.388811 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/0b81f473-96f7-4d5c-9695-cac22c344ed5-httpd-config\") pod \"neutron-d7cb5dffc-4r8bd\" (UID: \"0b81f473-96f7-4d5c-9695-cac22c344ed5\") " pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.394415 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b81f473-96f7-4d5c-9695-cac22c344ed5-internal-tls-certs\") pod \"neutron-d7cb5dffc-4r8bd\" (UID: \"0b81f473-96f7-4d5c-9695-cac22c344ed5\") " pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.400780 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/0b81f473-96f7-4d5c-9695-cac22c344ed5-config\") pod \"neutron-d7cb5dffc-4r8bd\" (UID: \"0b81f473-96f7-4d5c-9695-cac22c344ed5\") " pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.484506 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.736223 4755 generic.go:334] "Generic (PLEG): container finished" podID="dd8f8c90-543e-4125-9a9b-8c33e75c75ca" containerID="cd8dba955bdd475e250cf9ac3b118aec4b6716623e5bb9820f4b0d1ce85596fc" exitCode=0 Feb 02 22:54:28 crc kubenswrapper[4755]: I0202 22:54:28.736280 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-96d6dcbbf-js6bv" event={"ID":"dd8f8c90-543e-4125-9a9b-8c33e75c75ca","Type":"ContainerDied","Data":"cd8dba955bdd475e250cf9ac3b118aec4b6716623e5bb9820f4b0d1ce85596fc"} Feb 02 22:54:29 crc kubenswrapper[4755]: I0202 22:54:29.149911 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:29 crc kubenswrapper[4755]: I0202 22:54:29.243281 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-jt9ln"] Feb 02 22:54:29 crc kubenswrapper[4755]: I0202 22:54:29.243502 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" podUID="73a43f85-781b-4f83-b18d-0d69e6d272e0" containerName="dnsmasq-dns" containerID="cri-o://774ccc6d24c7409568cc73b5565ada9f92518ff2b8eb0bc4d4fb44735ddafc05" gracePeriod=10 Feb 02 22:54:29 crc kubenswrapper[4755]: I0202 22:54:29.747041 4755 generic.go:334] "Generic (PLEG): container finished" podID="bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe" containerID="446d14daa03e8b6b461fd6b5532b4b64048f99b269174b061d4c46666f735e82" exitCode=0 Feb 02 22:54:29 crc kubenswrapper[4755]: I0202 22:54:29.747177 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-75zwb" event={"ID":"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe","Type":"ContainerDied","Data":"446d14daa03e8b6b461fd6b5532b4b64048f99b269174b061d4c46666f735e82"} Feb 02 22:54:29 crc kubenswrapper[4755]: I0202 22:54:29.754756 4755 generic.go:334] "Generic (PLEG): container finished" podID="73a43f85-781b-4f83-b18d-0d69e6d272e0" containerID="774ccc6d24c7409568cc73b5565ada9f92518ff2b8eb0bc4d4fb44735ddafc05" exitCode=0 Feb 02 22:54:29 crc kubenswrapper[4755]: I0202 22:54:29.754796 4755 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" event={"ID":"73a43f85-781b-4f83-b18d-0d69e6d272e0","Type":"ContainerDied","Data":"774ccc6d24c7409568cc73b5565ada9f92518ff2b8eb0bc4d4fb44735ddafc05"} Feb 02 22:54:30 crc kubenswrapper[4755]: I0202 22:54:30.215578 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-96d6dcbbf-js6bv" podUID="dd8f8c90-543e-4125-9a9b-8c33e75c75ca" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.174:9696/\": dial tcp 10.217.0.174:9696: connect: connection refused" Feb 02 22:54:30 crc kubenswrapper[4755]: I0202 22:54:30.653437 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-66999cb498-vs49f" Feb 02 22:54:30 crc kubenswrapper[4755]: I0202 22:54:30.655209 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-66999cb498-vs49f" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.517745 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-pwclg" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.519090 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-75zwb" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.652306 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-scripts\") pod \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\" (UID: \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\") " Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.652355 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n67bd\" (UniqueName: \"kubernetes.io/projected/0403cc43-6199-4e95-b427-c4f268d8049a-kube-api-access-n67bd\") pod \"0403cc43-6199-4e95-b427-c4f268d8049a\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.652426 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-scripts\") pod \"0403cc43-6199-4e95-b427-c4f268d8049a\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.652481 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-combined-ca-bundle\") pod \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\" (UID: \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\") " Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.652505 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-combined-ca-bundle\") pod \"0403cc43-6199-4e95-b427-c4f268d8049a\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.652551 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0403cc43-6199-4e95-b427-c4f268d8049a-etc-machine-id\") pod \"0403cc43-6199-4e95-b427-c4f268d8049a\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.652638 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-db-sync-config-data\") pod \"0403cc43-6199-4e95-b427-c4f268d8049a\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.652695 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-certs\") pod \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\" (UID: \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\") " Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.652806 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htwfg\" (UniqueName: \"kubernetes.io/projected/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-kube-api-access-htwfg\") pod \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\" (UID: \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\") " Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.652835 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-config-data\") pod \"0403cc43-6199-4e95-b427-c4f268d8049a\" (UID: \"0403cc43-6199-4e95-b427-c4f268d8049a\") " Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.652852 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-config-data\") pod \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\" (UID: \"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe\") " Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.653174 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0403cc43-6199-4e95-b427-c4f268d8049a-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "0403cc43-6199-4e95-b427-c4f268d8049a" (UID: "0403cc43-6199-4e95-b427-c4f268d8049a"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.653299 4755 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0403cc43-6199-4e95-b427-c4f268d8049a-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.661145 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-scripts" (OuterVolumeSpecName: "scripts") pod "bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe" (UID: "bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.664782 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-kube-api-access-htwfg" (OuterVolumeSpecName: "kube-api-access-htwfg") pod "bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe" (UID: "bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe"). InnerVolumeSpecName "kube-api-access-htwfg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.665197 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0403cc43-6199-4e95-b427-c4f268d8049a-kube-api-access-n67bd" (OuterVolumeSpecName: "kube-api-access-n67bd") pod "0403cc43-6199-4e95-b427-c4f268d8049a" (UID: "0403cc43-6199-4e95-b427-c4f268d8049a"). InnerVolumeSpecName "kube-api-access-n67bd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.666900 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-scripts" (OuterVolumeSpecName: "scripts") pod "0403cc43-6199-4e95-b427-c4f268d8049a" (UID: "0403cc43-6199-4e95-b427-c4f268d8049a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.669068 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "0403cc43-6199-4e95-b427-c4f268d8049a" (UID: "0403cc43-6199-4e95-b427-c4f268d8049a"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.673152 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-certs" (OuterVolumeSpecName: "certs") pod "bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe" (UID: "bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.704039 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe" (UID: "bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.755005 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.755034 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n67bd\" (UniqueName: \"kubernetes.io/projected/0403cc43-6199-4e95-b427-c4f268d8049a-kube-api-access-n67bd\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.755046 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.755054 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.755065 4755 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.755073 4755 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-certs\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.755082 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htwfg\" (UniqueName: \"kubernetes.io/projected/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-kube-api-access-htwfg\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.755183 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0403cc43-6199-4e95-b427-c4f268d8049a" (UID: "0403cc43-6199-4e95-b427-c4f268d8049a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.812471 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-config-data" (OuterVolumeSpecName: "config-data") pod "0403cc43-6199-4e95-b427-c4f268d8049a" (UID: "0403cc43-6199-4e95-b427-c4f268d8049a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.812505 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-config-data" (OuterVolumeSpecName: "config-data") pod "bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe" (UID: "bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.826314 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-75zwb" event={"ID":"bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe","Type":"ContainerDied","Data":"300e25a36ed4b8ca65061808df7cb3dc29fa8205c246de2a2427b5910a887d98"} Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.826363 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="300e25a36ed4b8ca65061808df7cb3dc29fa8205c246de2a2427b5910a887d98" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.826441 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-75zwb" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.833162 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-pwclg" event={"ID":"0403cc43-6199-4e95-b427-c4f268d8049a","Type":"ContainerDied","Data":"d36739d4bc15b804d079494ec6c033766dcd0b9408e2623c44c417fc982ec2d8"} Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.833201 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d36739d4bc15b804d079494ec6c033766dcd0b9408e2623c44c417fc982ec2d8" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.833252 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-pwclg" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.856570 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.856600 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.856608 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0403cc43-6199-4e95-b427-c4f268d8049a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.890777 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-storageinit-pnrv4"] Feb 02 22:54:31 crc kubenswrapper[4755]: E0202 22:54:31.891165 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0403cc43-6199-4e95-b427-c4f268d8049a" containerName="cinder-db-sync" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.891178 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="0403cc43-6199-4e95-b427-c4f268d8049a" containerName="cinder-db-sync" Feb 02 22:54:31 crc kubenswrapper[4755]: E0202 22:54:31.891199 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe" containerName="cloudkitty-db-sync" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.891207 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe" containerName="cloudkitty-db-sync" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.891557 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe" containerName="cloudkitty-db-sync" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.891575 4755 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="0403cc43-6199-4e95-b427-c4f268d8049a" containerName="cinder-db-sync" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.892311 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-pnrv4" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.894102 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.895611 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.895867 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.896011 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-6xjk4" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.898741 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.910499 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-pnrv4"] Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.958802 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43476281-d17e-4f36-8934-215e34e77ac6-scripts\") pod \"cloudkitty-storageinit-pnrv4\" (UID: \"43476281-d17e-4f36-8934-215e34e77ac6\") " pod="openstack/cloudkitty-storageinit-pnrv4" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.958881 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43476281-d17e-4f36-8934-215e34e77ac6-config-data\") pod \"cloudkitty-storageinit-pnrv4\" (UID: \"43476281-d17e-4f36-8934-215e34e77ac6\") " pod="openstack/cloudkitty-storageinit-pnrv4" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.958945 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43476281-d17e-4f36-8934-215e34e77ac6-combined-ca-bundle\") pod \"cloudkitty-storageinit-pnrv4\" (UID: \"43476281-d17e-4f36-8934-215e34e77ac6\") " pod="openstack/cloudkitty-storageinit-pnrv4" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.958992 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/43476281-d17e-4f36-8934-215e34e77ac6-certs\") pod \"cloudkitty-storageinit-pnrv4\" (UID: \"43476281-d17e-4f36-8934-215e34e77ac6\") " pod="openstack/cloudkitty-storageinit-pnrv4" Feb 02 22:54:31 crc kubenswrapper[4755]: I0202 22:54:31.959011 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwj54\" (UniqueName: \"kubernetes.io/projected/43476281-d17e-4f36-8934-215e34e77ac6-kube-api-access-zwj54\") pod \"cloudkitty-storageinit-pnrv4\" (UID: \"43476281-d17e-4f36-8934-215e34e77ac6\") " pod="openstack/cloudkitty-storageinit-pnrv4" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.060752 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43476281-d17e-4f36-8934-215e34e77ac6-scripts\") pod 
\"cloudkitty-storageinit-pnrv4\" (UID: \"43476281-d17e-4f36-8934-215e34e77ac6\") " pod="openstack/cloudkitty-storageinit-pnrv4" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.060833 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43476281-d17e-4f36-8934-215e34e77ac6-config-data\") pod \"cloudkitty-storageinit-pnrv4\" (UID: \"43476281-d17e-4f36-8934-215e34e77ac6\") " pod="openstack/cloudkitty-storageinit-pnrv4" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.060899 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43476281-d17e-4f36-8934-215e34e77ac6-combined-ca-bundle\") pod \"cloudkitty-storageinit-pnrv4\" (UID: \"43476281-d17e-4f36-8934-215e34e77ac6\") " pod="openstack/cloudkitty-storageinit-pnrv4" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.060946 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/43476281-d17e-4f36-8934-215e34e77ac6-certs\") pod \"cloudkitty-storageinit-pnrv4\" (UID: \"43476281-d17e-4f36-8934-215e34e77ac6\") " pod="openstack/cloudkitty-storageinit-pnrv4" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.060965 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwj54\" (UniqueName: \"kubernetes.io/projected/43476281-d17e-4f36-8934-215e34e77ac6-kube-api-access-zwj54\") pod \"cloudkitty-storageinit-pnrv4\" (UID: \"43476281-d17e-4f36-8934-215e34e77ac6\") " pod="openstack/cloudkitty-storageinit-pnrv4" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.073151 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43476281-d17e-4f36-8934-215e34e77ac6-scripts\") pod \"cloudkitty-storageinit-pnrv4\" (UID: \"43476281-d17e-4f36-8934-215e34e77ac6\") " pod="openstack/cloudkitty-storageinit-pnrv4" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.073610 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43476281-d17e-4f36-8934-215e34e77ac6-config-data\") pod \"cloudkitty-storageinit-pnrv4\" (UID: \"43476281-d17e-4f36-8934-215e34e77ac6\") " pod="openstack/cloudkitty-storageinit-pnrv4" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.077408 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/43476281-d17e-4f36-8934-215e34e77ac6-certs\") pod \"cloudkitty-storageinit-pnrv4\" (UID: \"43476281-d17e-4f36-8934-215e34e77ac6\") " pod="openstack/cloudkitty-storageinit-pnrv4" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.078198 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43476281-d17e-4f36-8934-215e34e77ac6-combined-ca-bundle\") pod \"cloudkitty-storageinit-pnrv4\" (UID: \"43476281-d17e-4f36-8934-215e34e77ac6\") " pod="openstack/cloudkitty-storageinit-pnrv4" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.101414 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwj54\" (UniqueName: \"kubernetes.io/projected/43476281-d17e-4f36-8934-215e34e77ac6-kube-api-access-zwj54\") pod \"cloudkitty-storageinit-pnrv4\" (UID: \"43476281-d17e-4f36-8934-215e34e77ac6\") " 
pod="openstack/cloudkitty-storageinit-pnrv4" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.233194 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-pnrv4" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.819785 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.821322 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.842428 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.842696 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-mf9xr" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.842839 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.853858 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.856411 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.879637 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.879753 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.879771 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-config-data\") pod \"cinder-scheduler-0\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.879802 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7eb8965b-8c6b-410a-9402-03e460f3dffc-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.879855 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qttfb\" (UniqueName: \"kubernetes.io/projected/7eb8965b-8c6b-410a-9402-03e460f3dffc-kube-api-access-qttfb\") pod \"cinder-scheduler-0\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.879888 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-scripts\") pod \"cinder-scheduler-0\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.970940 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-c2jt2"] Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.972632 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.980885 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qttfb\" (UniqueName: \"kubernetes.io/projected/7eb8965b-8c6b-410a-9402-03e460f3dffc-kube-api-access-qttfb\") pod \"cinder-scheduler-0\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.980946 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-c2jt2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") " pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.980970 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-scripts\") pod \"cinder-scheduler-0\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.981003 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-c2jt2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") " pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.981048 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.981098 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-dns-svc\") pod \"dnsmasq-dns-6578955fd5-c2jt2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") " pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.981136 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.981153 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-config-data\") pod \"cinder-scheduler-0\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") 
" pod="openstack/cinder-scheduler-0" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.981183 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfv44\" (UniqueName: \"kubernetes.io/projected/a6a7c105-7184-419e-a061-819d29a9a7c2-kube-api-access-rfv44\") pod \"dnsmasq-dns-6578955fd5-c2jt2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") " pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.981205 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7eb8965b-8c6b-410a-9402-03e460f3dffc-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.981225 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-config\") pod \"dnsmasq-dns-6578955fd5-c2jt2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") " pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.981264 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-c2jt2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") " pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.983895 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-c2jt2"] Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.988885 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-scripts\") pod \"cinder-scheduler-0\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.988950 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7eb8965b-8c6b-410a-9402-03e460f3dffc-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:32 crc kubenswrapper[4755]: I0202 22:54:32.999650 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-config-data\") pod \"cinder-scheduler-0\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.011422 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.023343 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") " 
pod="openstack/cinder-scheduler-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.029264 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qttfb\" (UniqueName: \"kubernetes.io/projected/7eb8965b-8c6b-410a-9402-03e460f3dffc-kube-api-access-qttfb\") pod \"cinder-scheduler-0\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.085675 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfv44\" (UniqueName: \"kubernetes.io/projected/a6a7c105-7184-419e-a061-819d29a9a7c2-kube-api-access-rfv44\") pod \"dnsmasq-dns-6578955fd5-c2jt2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") " pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.087953 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-config\") pod \"dnsmasq-dns-6578955fd5-c2jt2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") " pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.088043 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-c2jt2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") " pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.088118 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-c2jt2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") " pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.088184 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-c2jt2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") " pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.088327 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-dns-svc\") pod \"dnsmasq-dns-6578955fd5-c2jt2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") " pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.090384 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-c2jt2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") " pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.091199 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-config\") pod \"dnsmasq-dns-6578955fd5-c2jt2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") " pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.091776 4755 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-c2jt2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") " pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.096191 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-c2jt2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") " pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.096805 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-dns-svc\") pod \"dnsmasq-dns-6578955fd5-c2jt2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") " pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.106388 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.122649 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfv44\" (UniqueName: \"kubernetes.io/projected/a6a7c105-7184-419e-a061-819d29a9a7c2-kube-api-access-rfv44\") pod \"dnsmasq-dns-6578955fd5-c2jt2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") " pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.134267 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.141101 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.190681 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-dns-swift-storage-0\") pod \"73a43f85-781b-4f83-b18d-0d69e6d272e0\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.190825 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-ovsdbserver-nb\") pod \"73a43f85-781b-4f83-b18d-0d69e6d272e0\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.190920 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x227b\" (UniqueName: \"kubernetes.io/projected/73a43f85-781b-4f83-b18d-0d69e6d272e0-kube-api-access-x227b\") pod \"73a43f85-781b-4f83-b18d-0d69e6d272e0\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.190957 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-ovsdbserver-sb\") pod \"73a43f85-781b-4f83-b18d-0d69e6d272e0\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.191073 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-dns-svc\") pod \"73a43f85-781b-4f83-b18d-0d69e6d272e0\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.191109 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-config\") pod \"73a43f85-781b-4f83-b18d-0d69e6d272e0\" (UID: \"73a43f85-781b-4f83-b18d-0d69e6d272e0\") " Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.212817 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73a43f85-781b-4f83-b18d-0d69e6d272e0-kube-api-access-x227b" (OuterVolumeSpecName: "kube-api-access-x227b") pod "73a43f85-781b-4f83-b18d-0d69e6d272e0" (UID: "73a43f85-781b-4f83-b18d-0d69e6d272e0"). InnerVolumeSpecName "kube-api-access-x227b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.274283 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Feb 02 22:54:33 crc kubenswrapper[4755]: E0202 22:54:33.274688 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73a43f85-781b-4f83-b18d-0d69e6d272e0" containerName="dnsmasq-dns" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.274699 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="73a43f85-781b-4f83-b18d-0d69e6d272e0" containerName="dnsmasq-dns" Feb 02 22:54:33 crc kubenswrapper[4755]: E0202 22:54:33.274715 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73a43f85-781b-4f83-b18d-0d69e6d272e0" containerName="init" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.274721 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="73a43f85-781b-4f83-b18d-0d69e6d272e0" containerName="init" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.274933 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="73a43f85-781b-4f83-b18d-0d69e6d272e0" containerName="dnsmasq-dns" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.276065 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.278790 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.310820 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "73a43f85-781b-4f83-b18d-0d69e6d272e0" (UID: "73a43f85-781b-4f83-b18d-0d69e6d272e0"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.332799 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a994ed9e-b4b5-4414-8251-a56504edef56-logs\") pod \"cinder-api-0\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") " pod="openstack/cinder-api-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.332921 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a994ed9e-b4b5-4414-8251-a56504edef56-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") " pod="openstack/cinder-api-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.333007 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-scripts\") pod \"cinder-api-0\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") " pod="openstack/cinder-api-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.333056 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") " pod="openstack/cinder-api-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.333185 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-config-data-custom\") pod \"cinder-api-0\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") " pod="openstack/cinder-api-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.333259 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-config-data\") pod \"cinder-api-0\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") " pod="openstack/cinder-api-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.333351 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gksrd\" (UniqueName: \"kubernetes.io/projected/a994ed9e-b4b5-4414-8251-a56504edef56-kube-api-access-gksrd\") pod \"cinder-api-0\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") " pod="openstack/cinder-api-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.333522 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.333556 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x227b\" (UniqueName: \"kubernetes.io/projected/73a43f85-781b-4f83-b18d-0d69e6d272e0-kube-api-access-x227b\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.376980 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.382721 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "73a43f85-781b-4f83-b18d-0d69e6d272e0" (UID: "73a43f85-781b-4f83-b18d-0d69e6d272e0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.393199 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "73a43f85-781b-4f83-b18d-0d69e6d272e0" (UID: "73a43f85-781b-4f83-b18d-0d69e6d272e0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.423340 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-config" (OuterVolumeSpecName: "config") pod "73a43f85-781b-4f83-b18d-0d69e6d272e0" (UID: "73a43f85-781b-4f83-b18d-0d69e6d272e0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.435496 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a994ed9e-b4b5-4414-8251-a56504edef56-logs\") pod \"cinder-api-0\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") " pod="openstack/cinder-api-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.435543 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a994ed9e-b4b5-4414-8251-a56504edef56-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") " pod="openstack/cinder-api-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.435580 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-scripts\") pod \"cinder-api-0\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") " pod="openstack/cinder-api-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.435599 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") " pod="openstack/cinder-api-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.435645 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-config-data-custom\") pod \"cinder-api-0\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") " pod="openstack/cinder-api-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.435672 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-config-data\") pod \"cinder-api-0\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") " pod="openstack/cinder-api-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.435706 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gksrd\" (UniqueName: \"kubernetes.io/projected/a994ed9e-b4b5-4414-8251-a56504edef56-kube-api-access-gksrd\") pod \"cinder-api-0\" (UID: 
\"a994ed9e-b4b5-4414-8251-a56504edef56\") " pod="openstack/cinder-api-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.435834 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.435847 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.435856 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.437030 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a994ed9e-b4b5-4414-8251-a56504edef56-logs\") pod \"cinder-api-0\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") " pod="openstack/cinder-api-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.440869 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a994ed9e-b4b5-4414-8251-a56504edef56-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") " pod="openstack/cinder-api-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.441773 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") " pod="openstack/cinder-api-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.442475 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-config-data-custom\") pod \"cinder-api-0\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") " pod="openstack/cinder-api-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.449445 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-scripts\") pod \"cinder-api-0\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") " pod="openstack/cinder-api-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.458955 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-config-data\") pod \"cinder-api-0\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") " pod="openstack/cinder-api-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.479188 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "73a43f85-781b-4f83-b18d-0d69e6d272e0" (UID: "73a43f85-781b-4f83-b18d-0d69e6d272e0"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.503277 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gksrd\" (UniqueName: \"kubernetes.io/projected/a994ed9e-b4b5-4414-8251-a56504edef56-kube-api-access-gksrd\") pod \"cinder-api-0\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") " pod="openstack/cinder-api-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.537516 4755 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/73a43f85-781b-4f83-b18d-0d69e6d272e0-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.648380 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.775051 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-pnrv4"] Feb 02 22:54:33 crc kubenswrapper[4755]: W0202 22:54:33.819256 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43476281_d17e_4f36_8934_215e34e77ac6.slice/crio-56ab41856473bbffc7fb43b20948e478e2c4e80718e12b9607c19631ccdf9ee6 WatchSource:0}: Error finding container 56ab41856473bbffc7fb43b20948e478e2c4e80718e12b9607c19631ccdf9ee6: Status 404 returned error can't find the container with id 56ab41856473bbffc7fb43b20948e478e2c4e80718e12b9607c19631ccdf9ee6 Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.900483 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" event={"ID":"73a43f85-781b-4f83-b18d-0d69e6d272e0","Type":"ContainerDied","Data":"59fea455a5a2c9344c52db863425754f83889948c44b949ad579dcedb3886d3a"} Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.900767 4755 scope.go:117] "RemoveContainer" containerID="774ccc6d24c7409568cc73b5565ada9f92518ff2b8eb0bc4d4fb44735ddafc05" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.900920 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.924437 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8","Type":"ContainerStarted","Data":"90ec1d35f62e555c0973e2d8316b13e9340ac28862a8ca96a9a390e9b5a732b4"} Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.924608 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" containerName="ceilometer-central-agent" containerID="cri-o://52bac8d44a3fc4485ca16814830e3f2bdab966bbab186b22e4a119fc492f0fec" gracePeriod=30 Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.924909 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.925195 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" containerName="proxy-httpd" containerID="cri-o://90ec1d35f62e555c0973e2d8316b13e9340ac28862a8ca96a9a390e9b5a732b4" gracePeriod=30 Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.925246 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" containerName="sg-core" containerID="cri-o://d051e8e995cd5167ecced02fb28ad36c7b2120e447b81b7cdfd7b9d886646cbe" gracePeriod=30 Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.925281 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" containerName="ceilometer-notification-agent" containerID="cri-o://1dc6513b3c0184af868a6b39a049b284d48393cdab9d92a1ff4976d21747c80a" gracePeriod=30 Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.936631 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-pnrv4" event={"ID":"43476281-d17e-4f36-8934-215e34e77ac6","Type":"ContainerStarted","Data":"56ab41856473bbffc7fb43b20948e478e2c4e80718e12b9607c19631ccdf9ee6"} Feb 02 22:54:33 crc kubenswrapper[4755]: I0202 22:54:33.962917 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.390321896 podStartE2EDuration="1m4.962900004s" podCreationTimestamp="2026-02-02 22:53:29 +0000 UTC" firstStartedPulling="2026-02-02 22:53:31.491268629 +0000 UTC m=+1167.182488955" lastFinishedPulling="2026-02-02 22:54:33.063846737 +0000 UTC m=+1228.755067063" observedRunningTime="2026-02-02 22:54:33.951975378 +0000 UTC m=+1229.643195704" watchObservedRunningTime="2026-02-02 22:54:33.962900004 +0000 UTC m=+1229.654120330" Feb 02 22:54:34 crc kubenswrapper[4755]: I0202 22:54:34.001448 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-jt9ln"] Feb 02 22:54:34 crc kubenswrapper[4755]: I0202 22:54:34.024031 4755 scope.go:117] "RemoveContainer" containerID="f5e0897b9b71dd6d64819c54eca3881e2bc5b12447ee249b80f91e1a20f5042c" Feb 02 22:54:34 crc kubenswrapper[4755]: I0202 22:54:34.029950 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-jt9ln"] Feb 02 22:54:34 crc kubenswrapper[4755]: I0202 22:54:34.055532 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-d7cb5dffc-4r8bd"] Feb 02 22:54:34 crc 
kubenswrapper[4755]: I0202 22:54:34.145802 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-c2jt2"] Feb 02 22:54:34 crc kubenswrapper[4755]: W0202 22:54:34.146579 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda6a7c105_7184_419e_a061_819d29a9a7c2.slice/crio-95127e4c3d59dbbf6dca7ec94b94d995bbff807dad2a343565797247ce509484 WatchSource:0}: Error finding container 95127e4c3d59dbbf6dca7ec94b94d995bbff807dad2a343565797247ce509484: Status 404 returned error can't find the container with id 95127e4c3d59dbbf6dca7ec94b94d995bbff807dad2a343565797247ce509484 Feb 02 22:54:34 crc kubenswrapper[4755]: I0202 22:54:34.147671 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 22:54:34 crc kubenswrapper[4755]: W0202 22:54:34.201402 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7eb8965b_8c6b_410a_9402_03e460f3dffc.slice/crio-2d502db7a7699956caf59f4dc7156fbe8eb325ba110decc1d70de6bf7855d98d WatchSource:0}: Error finding container 2d502db7a7699956caf59f4dc7156fbe8eb325ba110decc1d70de6bf7855d98d: Status 404 returned error can't find the container with id 2d502db7a7699956caf59f4dc7156fbe8eb325ba110decc1d70de6bf7855d98d Feb 02 22:54:34 crc kubenswrapper[4755]: I0202 22:54:34.370074 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 02 22:54:34 crc kubenswrapper[4755]: I0202 22:54:34.654428 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:34 crc kubenswrapper[4755]: E0202 22:54:34.880666 4755 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4cd6f3d2_cc8a_4156_b237_dc09b37a80d8.slice/crio-52bac8d44a3fc4485ca16814830e3f2bdab966bbab186b22e4a119fc492f0fec.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4cd6f3d2_cc8a_4156_b237_dc09b37a80d8.slice/crio-conmon-52bac8d44a3fc4485ca16814830e3f2bdab966bbab186b22e4a119fc492f0fec.scope\": RecentStats: unable to find data in memory cache]" Feb 02 22:54:34 crc kubenswrapper[4755]: I0202 22:54:34.989345 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d7cb5dffc-4r8bd" event={"ID":"0b81f473-96f7-4d5c-9695-cac22c344ed5","Type":"ContainerStarted","Data":"83d90483902fc1a67b2d32dc52799a469808b573cd706cd92af8ea9f680c5c5d"} Feb 02 22:54:34 crc kubenswrapper[4755]: I0202 22:54:34.989401 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d7cb5dffc-4r8bd" event={"ID":"0b81f473-96f7-4d5c-9695-cac22c344ed5","Type":"ContainerStarted","Data":"c1abc3a4dc9a6ec5f1f5797d797e7c0e1886ab79d28d133a7f15bf85c5b6807b"} Feb 02 22:54:34 crc kubenswrapper[4755]: I0202 22:54:34.997196 4755 generic.go:334] "Generic (PLEG): container finished" podID="4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" containerID="90ec1d35f62e555c0973e2d8316b13e9340ac28862a8ca96a9a390e9b5a732b4" exitCode=0 Feb 02 22:54:34 crc kubenswrapper[4755]: I0202 22:54:34.997236 4755 generic.go:334] "Generic (PLEG): container finished" podID="4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" containerID="d051e8e995cd5167ecced02fb28ad36c7b2120e447b81b7cdfd7b9d886646cbe" exitCode=2 Feb 02 22:54:34 crc kubenswrapper[4755]: I0202 
22:54:34.997247 4755 generic.go:334] "Generic (PLEG): container finished" podID="4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" containerID="52bac8d44a3fc4485ca16814830e3f2bdab966bbab186b22e4a119fc492f0fec" exitCode=0 Feb 02 22:54:34 crc kubenswrapper[4755]: I0202 22:54:34.997300 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8","Type":"ContainerDied","Data":"90ec1d35f62e555c0973e2d8316b13e9340ac28862a8ca96a9a390e9b5a732b4"} Feb 02 22:54:34 crc kubenswrapper[4755]: I0202 22:54:34.997331 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8","Type":"ContainerDied","Data":"d051e8e995cd5167ecced02fb28ad36c7b2120e447b81b7cdfd7b9d886646cbe"} Feb 02 22:54:34 crc kubenswrapper[4755]: I0202 22:54:34.997344 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8","Type":"ContainerDied","Data":"52bac8d44a3fc4485ca16814830e3f2bdab966bbab186b22e4a119fc492f0fec"} Feb 02 22:54:35 crc kubenswrapper[4755]: I0202 22:54:35.017375 4755 generic.go:334] "Generic (PLEG): container finished" podID="a6a7c105-7184-419e-a061-819d29a9a7c2" containerID="45a76b4b65074a1b235b20bd21f483db3a41efe6cb4c650dd636b69f783411a4" exitCode=0 Feb 02 22:54:35 crc kubenswrapper[4755]: I0202 22:54:35.017476 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" event={"ID":"a6a7c105-7184-419e-a061-819d29a9a7c2","Type":"ContainerDied","Data":"45a76b4b65074a1b235b20bd21f483db3a41efe6cb4c650dd636b69f783411a4"} Feb 02 22:54:35 crc kubenswrapper[4755]: I0202 22:54:35.017503 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" event={"ID":"a6a7c105-7184-419e-a061-819d29a9a7c2","Type":"ContainerStarted","Data":"95127e4c3d59dbbf6dca7ec94b94d995bbff807dad2a343565797247ce509484"} Feb 02 22:54:35 crc kubenswrapper[4755]: I0202 22:54:35.028134 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7eb8965b-8c6b-410a-9402-03e460f3dffc","Type":"ContainerStarted","Data":"2d502db7a7699956caf59f4dc7156fbe8eb325ba110decc1d70de6bf7855d98d"} Feb 02 22:54:35 crc kubenswrapper[4755]: I0202 22:54:35.028871 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 02 22:54:35 crc kubenswrapper[4755]: I0202 22:54:35.029366 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-pnrv4" event={"ID":"43476281-d17e-4f36-8934-215e34e77ac6","Type":"ContainerStarted","Data":"02a7be8f4ff0bf2cfddfd57c5c623e159f0d5ce252bc6e79c5531ed99ff304b3"} Feb 02 22:54:35 crc kubenswrapper[4755]: I0202 22:54:35.030992 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a994ed9e-b4b5-4414-8251-a56504edef56","Type":"ContainerStarted","Data":"d34181acf09b6dd7af86cc4985031be0f112a8e346a35840e66caa90404c611c"} Feb 02 22:54:35 crc kubenswrapper[4755]: I0202 22:54:35.051229 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-75f5dc8786-9gzp2" Feb 02 22:54:35 crc kubenswrapper[4755]: I0202 22:54:35.069705 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-storageinit-pnrv4" podStartSLOduration=4.069685702 podStartE2EDuration="4.069685702s" podCreationTimestamp="2026-02-02 22:54:31 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:35.064154317 +0000 UTC m=+1230.755374653" watchObservedRunningTime="2026-02-02 22:54:35.069685702 +0000 UTC m=+1230.760906018" Feb 02 22:54:35 crc kubenswrapper[4755]: I0202 22:54:35.218810 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73a43f85-781b-4f83-b18d-0d69e6d272e0" path="/var/lib/kubelet/pods/73a43f85-781b-4f83-b18d-0d69e6d272e0/volumes" Feb 02 22:54:35 crc kubenswrapper[4755]: I0202 22:54:35.221239 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-66999cb498-vs49f"] Feb 02 22:54:35 crc kubenswrapper[4755]: I0202 22:54:35.221585 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-66999cb498-vs49f" podUID="26b37ea7-b9d1-4db2-976b-8d40431c46e2" containerName="barbican-api-log" containerID="cri-o://a24ab594892b4fcc80d70e06b80c1a35dacad0d4ae7d18eef01b7223e8f40ed0" gracePeriod=30 Feb 02 22:54:35 crc kubenswrapper[4755]: I0202 22:54:35.221760 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-66999cb498-vs49f" podUID="26b37ea7-b9d1-4db2-976b-8d40431c46e2" containerName="barbican-api" containerID="cri-o://a8e555a10cddffc2a26ace3dd724cfaa5c321dc6b36d460b77d3e176effd042a" gracePeriod=30 Feb 02 22:54:36 crc kubenswrapper[4755]: I0202 22:54:36.060320 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d7cb5dffc-4r8bd" event={"ID":"0b81f473-96f7-4d5c-9695-cac22c344ed5","Type":"ContainerStarted","Data":"ec97337cc70b237ff14d356226d9efbb62117f014de7bbed02151fc3fdcbe792"} Feb 02 22:54:36 crc kubenswrapper[4755]: I0202 22:54:36.060855 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:36 crc kubenswrapper[4755]: I0202 22:54:36.088366 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" event={"ID":"a6a7c105-7184-419e-a061-819d29a9a7c2","Type":"ContainerStarted","Data":"d0ad6d3e3f03f7d5cb6a13a559c3e0d6e077c6f75afc740b366feb438f5c25ef"} Feb 02 22:54:36 crc kubenswrapper[4755]: I0202 22:54:36.089571 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" Feb 02 22:54:36 crc kubenswrapper[4755]: I0202 22:54:36.113537 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-d7cb5dffc-4r8bd" podStartSLOduration=8.113519183 podStartE2EDuration="8.113519183s" podCreationTimestamp="2026-02-02 22:54:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:36.08989543 +0000 UTC m=+1231.781115756" watchObservedRunningTime="2026-02-02 22:54:36.113519183 +0000 UTC m=+1231.804739509" Feb 02 22:54:36 crc kubenswrapper[4755]: I0202 22:54:36.114933 4755 generic.go:334] "Generic (PLEG): container finished" podID="26b37ea7-b9d1-4db2-976b-8d40431c46e2" containerID="a24ab594892b4fcc80d70e06b80c1a35dacad0d4ae7d18eef01b7223e8f40ed0" exitCode=143 Feb 02 22:54:36 crc kubenswrapper[4755]: I0202 22:54:36.115039 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-66999cb498-vs49f" event={"ID":"26b37ea7-b9d1-4db2-976b-8d40431c46e2","Type":"ContainerDied","Data":"a24ab594892b4fcc80d70e06b80c1a35dacad0d4ae7d18eef01b7223e8f40ed0"} Feb 02 22:54:36 crc 
Feb 02 22:54:36 crc kubenswrapper[4755]: I0202 22:54:36.119779 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a994ed9e-b4b5-4414-8251-a56504edef56","Type":"ContainerStarted","Data":"21028ff8cced81c505cc1f9535722821815ba97d2e303424eb517ed86ccef002"}
Feb 02 22:54:36 crc kubenswrapper[4755]: I0202 22:54:36.121465 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" podStartSLOduration=4.121447606 podStartE2EDuration="4.121447606s" podCreationTimestamp="2026-02-02 22:54:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:36.114709027 +0000 UTC m=+1231.805929353" watchObservedRunningTime="2026-02-02 22:54:36.121447606 +0000 UTC m=+1231.812667932"
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.129226 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a994ed9e-b4b5-4414-8251-a56504edef56","Type":"ContainerStarted","Data":"802e105a66e84eafb9de9f645acf9c09a0ebb16ddf3787d6287b4300df5255d3"}
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.129773 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="a994ed9e-b4b5-4414-8251-a56504edef56" containerName="cinder-api-log" containerID="cri-o://21028ff8cced81c505cc1f9535722821815ba97d2e303424eb517ed86ccef002" gracePeriod=30
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.130073 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.130402 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="a994ed9e-b4b5-4414-8251-a56504edef56" containerName="cinder-api" containerID="cri-o://802e105a66e84eafb9de9f645acf9c09a0ebb16ddf3787d6287b4300df5255d3" gracePeriod=30
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.136328 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7eb8965b-8c6b-410a-9402-03e460f3dffc","Type":"ContainerStarted","Data":"763952e928581b0aac101da50d23e86e5d2ffc3af29bdd9acaf803b7ca65c369"}
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.136380 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7eb8965b-8c6b-410a-9402-03e460f3dffc","Type":"ContainerStarted","Data":"ada66a31e9d9da44388b0ed882af5efae575c907f483202c0e54231f247f4d9b"}
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.157529 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.157508088 podStartE2EDuration="4.157508088s" podCreationTimestamp="2026-02-02 22:54:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:37.153911807 +0000 UTC m=+1232.845132133" watchObservedRunningTime="2026-02-02 22:54:37.157508088 +0000 UTC m=+1232.848728414"
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.178428 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.020994986 podStartE2EDuration="5.178410744s" podCreationTimestamp="2026-02-02 22:54:32 +0000 UTC" firstStartedPulling="2026-02-02 22:54:34.211226703 +0000 UTC m=+1229.902447029" lastFinishedPulling="2026-02-02 22:54:35.368642461 +0000 UTC m=+1231.059862787" observedRunningTime="2026-02-02 22:54:37.173531018 +0000 UTC m=+1232.864751344" watchObservedRunningTime="2026-02-02 22:54:37.178410744 +0000 UTC m=+1232.869631070"
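The two duration fields in the cinder-scheduler-0 entry above differ by exactly the image-pull window: podStartSLOduration appears to be podStartE2EDuration (watchObservedRunningTime minus podCreationTimestamp) minus the time between firstStartedPulling and lastFinishedPulling. A quick check of that arithmetic in Go, using the values from the entry (the formula is inferred from the numbers, not taken from the kubelet's source):

    package main

    import (
        "fmt"
        "time"
    )

    func mustParse(s string) time.Time {
        t, err := time.Parse("2006-01-02 15:04:05 -0700 MST", s)
        if err != nil {
            panic(err)
        }
        return t
    }

    func main() {
        created := mustParse("2026-02-02 22:54:32 +0000 UTC")
        firstPull := mustParse("2026-02-02 22:54:34.211226703 +0000 UTC")
        lastPull := mustParse("2026-02-02 22:54:35.368642461 +0000 UTC")
        running := mustParse("2026-02-02 22:54:37.178410744 +0000 UTC")

        e2e := running.Sub(created)          // podStartE2EDuration
        slo := e2e - lastPull.Sub(firstPull) // podStartSLOduration = E2E minus image-pull time
        fmt.Println(e2e, slo)                // 5.178410744s 4.020994986s
    }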
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.747359 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6b7b667979-jt9ln" podUID="73a43f85-781b-4f83-b18d-0d69e6d272e0" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.172:5353: i/o timeout"
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.800643 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.840843 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gksrd\" (UniqueName: \"kubernetes.io/projected/a994ed9e-b4b5-4414-8251-a56504edef56-kube-api-access-gksrd\") pod \"a994ed9e-b4b5-4414-8251-a56504edef56\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") "
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.841035 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-combined-ca-bundle\") pod \"a994ed9e-b4b5-4414-8251-a56504edef56\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") "
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.841092 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a994ed9e-b4b5-4414-8251-a56504edef56-etc-machine-id\") pod \"a994ed9e-b4b5-4414-8251-a56504edef56\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") "
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.841161 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-config-data\") pod \"a994ed9e-b4b5-4414-8251-a56504edef56\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") "
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.841213 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-scripts\") pod \"a994ed9e-b4b5-4414-8251-a56504edef56\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") "
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.841326 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-config-data-custom\") pod \"a994ed9e-b4b5-4414-8251-a56504edef56\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") "
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.841355 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a994ed9e-b4b5-4414-8251-a56504edef56-logs\") pod \"a994ed9e-b4b5-4414-8251-a56504edef56\" (UID: \"a994ed9e-b4b5-4414-8251-a56504edef56\") "
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.841205 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a994ed9e-b4b5-4414-8251-a56504edef56-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "a994ed9e-b4b5-4414-8251-a56504edef56" (UID: "a994ed9e-b4b5-4414-8251-a56504edef56"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
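The reconciler_common.go:159 / operation_generator.go:803 sequence above is the volume manager reconciling actual state against desired state: the deleted cinder-api-0 pod no longer wants its volumes, so each mount gets an UnmountVolume (TearDown) and is afterwards reported detached. A compressed sketch of that loop with invented types:

    package main

    import "fmt"

    type mount struct{ podUID, volume string }

    // reconcile unmounts anything in the actual state that the desired state no
    // longer contains, echoing the reconciler_common.go entries above.
    func reconcile(actual []mount, desired map[mount]bool) {
        for _, m := range actual {
            if !desired[m] {
                fmt.Printf("operationExecutor.UnmountVolume started for volume %q pod %q\n", m.volume, m.podUID)
                // plugin-specific TearDown (secret, projected, empty-dir, host-path) runs here
                fmt.Printf("Volume detached for volume %q\n", m.volume)
            }
        }
    }

    func main() {
        gone := []mount{{"a994ed9e", "config-data"}, {"a994ed9e", "logs"}}
        reconcile(gone, map[mount]bool{}) // the pod was deleted: nothing is desired
    }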
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.842155 4755 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a994ed9e-b4b5-4414-8251-a56504edef56-etc-machine-id\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.842477 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a994ed9e-b4b5-4414-8251-a56504edef56-logs" (OuterVolumeSpecName: "logs") pod "a994ed9e-b4b5-4414-8251-a56504edef56" (UID: "a994ed9e-b4b5-4414-8251-a56504edef56"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.847441 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a994ed9e-b4b5-4414-8251-a56504edef56" (UID: "a994ed9e-b4b5-4414-8251-a56504edef56"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.848291 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-scripts" (OuterVolumeSpecName: "scripts") pod "a994ed9e-b4b5-4414-8251-a56504edef56" (UID: "a994ed9e-b4b5-4414-8251-a56504edef56"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.848806 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a994ed9e-b4b5-4414-8251-a56504edef56-kube-api-access-gksrd" (OuterVolumeSpecName: "kube-api-access-gksrd") pod "a994ed9e-b4b5-4414-8251-a56504edef56" (UID: "a994ed9e-b4b5-4414-8251-a56504edef56"). InnerVolumeSpecName "kube-api-access-gksrd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.892876 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a994ed9e-b4b5-4414-8251-a56504edef56" (UID: "a994ed9e-b4b5-4414-8251-a56504edef56"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.937007 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-config-data" (OuterVolumeSpecName: "config-data") pod "a994ed9e-b4b5-4414-8251-a56504edef56" (UID: "a994ed9e-b4b5-4414-8251-a56504edef56"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.945178 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-config-data\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.945213 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-scripts\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.945223 4755 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-config-data-custom\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.945233 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a994ed9e-b4b5-4414-8251-a56504edef56-logs\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.945241 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gksrd\" (UniqueName: \"kubernetes.io/projected/a994ed9e-b4b5-4414-8251-a56504edef56-kube-api-access-gksrd\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:37 crc kubenswrapper[4755]: I0202 22:54:37.945249 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a994ed9e-b4b5-4414-8251-a56504edef56-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.106973 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.155577 4755 generic.go:334] "Generic (PLEG): container finished" podID="a994ed9e-b4b5-4414-8251-a56504edef56" containerID="802e105a66e84eafb9de9f645acf9c09a0ebb16ddf3787d6287b4300df5255d3" exitCode=0
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.155621 4755 generic.go:334] "Generic (PLEG): container finished" podID="a994ed9e-b4b5-4414-8251-a56504edef56" containerID="21028ff8cced81c505cc1f9535722821815ba97d2e303424eb517ed86ccef002" exitCode=143
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.155712 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a994ed9e-b4b5-4414-8251-a56504edef56","Type":"ContainerDied","Data":"802e105a66e84eafb9de9f645acf9c09a0ebb16ddf3787d6287b4300df5255d3"}
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.155775 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a994ed9e-b4b5-4414-8251-a56504edef56","Type":"ContainerDied","Data":"21028ff8cced81c505cc1f9535722821815ba97d2e303424eb517ed86ccef002"}
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.155811 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a994ed9e-b4b5-4414-8251-a56504edef56","Type":"ContainerDied","Data":"d34181acf09b6dd7af86cc4985031be0f112a8e346a35840e66caa90404c611c"}
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.155833 4755 scope.go:117] "RemoveContainer" containerID="802e105a66e84eafb9de9f645acf9c09a0ebb16ddf3787d6287b4300df5255d3"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.156078 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
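util.go:48's "No ready sandbox for pod can be found. Need to start a new one" (and the util.go:30 "No sandbox..." variant seen later) marks the pod-sync decision to create a fresh pod sandbox: either no sandbox exists at all, or the newest one is no longer ready after its containers died. A sketch of that decision with invented types:

    package main

    import "fmt"

    type sandbox struct {
        ready   bool
        created int64
    }

    // needsNewSandbox mirrors the shape of the two log messages: no sandbox at
    // all, or a newest sandbox that is not ready, both force a new one.
    func needsNewSandbox(sandboxes []sandbox) (bool, string) {
        if len(sandboxes) == 0 {
            return true, "No sandbox for pod can be found. Need to start a new one"
        }
        newest := sandboxes[0]
        for _, s := range sandboxes[1:] {
            if s.created > newest.created {
                newest = s
            }
        }
        if !newest.ready {
            return true, "No ready sandbox for pod can be found. Need to start a new one"
        }
        return false, ""
    }

    func main() {
        _, msg := needsNewSandbox([]sandbox{{ready: false, created: 100}})
        fmt.Println(msg)
    }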
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.160993 4755 generic.go:334] "Generic (PLEG): container finished" podID="4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" containerID="1dc6513b3c0184af868a6b39a049b284d48393cdab9d92a1ff4976d21747c80a" exitCode=0
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.161256 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8","Type":"ContainerDied","Data":"1dc6513b3c0184af868a6b39a049b284d48393cdab9d92a1ff4976d21747c80a"}
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.167278 4755 generic.go:334] "Generic (PLEG): container finished" podID="43476281-d17e-4f36-8934-215e34e77ac6" containerID="02a7be8f4ff0bf2cfddfd57c5c623e159f0d5ce252bc6e79c5531ed99ff304b3" exitCode=0
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.167370 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-pnrv4" event={"ID":"43476281-d17e-4f36-8934-215e34e77ac6","Type":"ContainerDied","Data":"02a7be8f4ff0bf2cfddfd57c5c623e159f0d5ce252bc6e79c5531ed99ff304b3"}
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.191201 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.196940 4755 scope.go:117] "RemoveContainer" containerID="21028ff8cced81c505cc1f9535722821815ba97d2e303424eb517ed86ccef002"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.211259 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.235820 4755 scope.go:117] "RemoveContainer" containerID="802e105a66e84eafb9de9f645acf9c09a0ebb16ddf3787d6287b4300df5255d3"
Feb 02 22:54:38 crc kubenswrapper[4755]: E0202 22:54:38.236402 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"802e105a66e84eafb9de9f645acf9c09a0ebb16ddf3787d6287b4300df5255d3\": container with ID starting with 802e105a66e84eafb9de9f645acf9c09a0ebb16ddf3787d6287b4300df5255d3 not found: ID does not exist" containerID="802e105a66e84eafb9de9f645acf9c09a0ebb16ddf3787d6287b4300df5255d3"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.236427 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"802e105a66e84eafb9de9f645acf9c09a0ebb16ddf3787d6287b4300df5255d3"} err="failed to get container status \"802e105a66e84eafb9de9f645acf9c09a0ebb16ddf3787d6287b4300df5255d3\": rpc error: code = NotFound desc = could not find container \"802e105a66e84eafb9de9f645acf9c09a0ebb16ddf3787d6287b4300df5255d3\": container with ID starting with 802e105a66e84eafb9de9f645acf9c09a0ebb16ddf3787d6287b4300df5255d3 not found: ID does not exist"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.236448 4755 scope.go:117] "RemoveContainer" containerID="21028ff8cced81c505cc1f9535722821815ba97d2e303424eb517ed86ccef002"
Feb 02 22:54:38 crc kubenswrapper[4755]: E0202 22:54:38.236738 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21028ff8cced81c505cc1f9535722821815ba97d2e303424eb517ed86ccef002\": container with ID starting with 21028ff8cced81c505cc1f9535722821815ba97d2e303424eb517ed86ccef002 not found: ID does not exist" containerID="21028ff8cced81c505cc1f9535722821815ba97d2e303424eb517ed86ccef002"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.236755 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21028ff8cced81c505cc1f9535722821815ba97d2e303424eb517ed86ccef002"} err="failed to get container status \"21028ff8cced81c505cc1f9535722821815ba97d2e303424eb517ed86ccef002\": rpc error: code = NotFound desc = could not find container \"21028ff8cced81c505cc1f9535722821815ba97d2e303424eb517ed86ccef002\": container with ID starting with 21028ff8cced81c505cc1f9535722821815ba97d2e303424eb517ed86ccef002 not found: ID does not exist"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.236767 4755 scope.go:117] "RemoveContainer" containerID="802e105a66e84eafb9de9f645acf9c09a0ebb16ddf3787d6287b4300df5255d3"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.239825 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"]
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.241125 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"802e105a66e84eafb9de9f645acf9c09a0ebb16ddf3787d6287b4300df5255d3"} err="failed to get container status \"802e105a66e84eafb9de9f645acf9c09a0ebb16ddf3787d6287b4300df5255d3\": rpc error: code = NotFound desc = could not find container \"802e105a66e84eafb9de9f645acf9c09a0ebb16ddf3787d6287b4300df5255d3\": container with ID starting with 802e105a66e84eafb9de9f645acf9c09a0ebb16ddf3787d6287b4300df5255d3 not found: ID does not exist"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.241227 4755 scope.go:117] "RemoveContainer" containerID="21028ff8cced81c505cc1f9535722821815ba97d2e303424eb517ed86ccef002"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.244932 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21028ff8cced81c505cc1f9535722821815ba97d2e303424eb517ed86ccef002"} err="failed to get container status \"21028ff8cced81c505cc1f9535722821815ba97d2e303424eb517ed86ccef002\": rpc error: code = NotFound desc = could not find container \"21028ff8cced81c505cc1f9535722821815ba97d2e303424eb517ed86ccef002\": container with ID starting with 21028ff8cced81c505cc1f9535722821815ba97d2e303424eb517ed86ccef002 not found: ID does not exist"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.260909 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-run-httpd\") pod \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") "
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.260992 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-sg-core-conf-yaml\") pod \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") "
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.261027 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-log-httpd\") pod \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") "
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.261053 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-combined-ca-bundle\") pod \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") "
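The NotFound errors above are benign cleanup races: RemoveContainer asks the runtime for container status, CRI-O answers with gRPC NotFound because the container is already gone, and the deletor logs the error and moves on. A sketch of treating NotFound as already-deleted, assuming a gRPC-backed runtime client:

    package main

    import (
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // removeContainer treats a NotFound from the runtime as success, so repeated
    // deletion attempts (as in the log above) are harmless no-ops.
    func removeContainer(id string, runtimeRemove func(string) error) error {
        if err := runtimeRemove(id); err != nil {
            if status.Code(err) == codes.NotFound {
                fmt.Printf("container %s already removed\n", id)
                return nil
            }
            return err
        }
        return nil
    }

    func main() {
        notFound := func(id string) error {
            return status.Error(codes.NotFound, "could not find container "+id)
        }
        _ = removeContainer("802e105a", notFound)
    }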
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.261094 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-config-data\") pod \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") "
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.261174 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqqhl\" (UniqueName: \"kubernetes.io/projected/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-kube-api-access-tqqhl\") pod \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") "
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.261220 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-scripts\") pod \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\" (UID: \"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8\") "
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.265666 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" (UID: "4cd6f3d2-cc8a-4156-b237-dc09b37a80d8"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.271663 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-kube-api-access-tqqhl" (OuterVolumeSpecName: "kube-api-access-tqqhl") pod "4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" (UID: "4cd6f3d2-cc8a-4156-b237-dc09b37a80d8"). InnerVolumeSpecName "kube-api-access-tqqhl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.272335 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" (UID: "4cd6f3d2-cc8a-4156-b237-dc09b37a80d8"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.275925 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-scripts" (OuterVolumeSpecName: "scripts") pod "4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" (UID: "4cd6f3d2-cc8a-4156-b237-dc09b37a80d8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.280970 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Feb 02 22:54:38 crc kubenswrapper[4755]: E0202 22:54:38.281386 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" containerName="proxy-httpd"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.281404 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" containerName="proxy-httpd"
Feb 02 22:54:38 crc kubenswrapper[4755]: E0202 22:54:38.281417 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" containerName="ceilometer-notification-agent"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.281424 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" containerName="ceilometer-notification-agent"
Feb 02 22:54:38 crc kubenswrapper[4755]: E0202 22:54:38.281436 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a994ed9e-b4b5-4414-8251-a56504edef56" containerName="cinder-api-log"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.281442 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a994ed9e-b4b5-4414-8251-a56504edef56" containerName="cinder-api-log"
Feb 02 22:54:38 crc kubenswrapper[4755]: E0202 22:54:38.281456 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a994ed9e-b4b5-4414-8251-a56504edef56" containerName="cinder-api"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.281462 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a994ed9e-b4b5-4414-8251-a56504edef56" containerName="cinder-api"
Feb 02 22:54:38 crc kubenswrapper[4755]: E0202 22:54:38.281475 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" containerName="ceilometer-central-agent"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.281481 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" containerName="ceilometer-central-agent"
Feb 02 22:54:38 crc kubenswrapper[4755]: E0202 22:54:38.281504 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" containerName="sg-core"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.281511 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" containerName="sg-core"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.281671 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" containerName="ceilometer-central-agent"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.281684 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a994ed9e-b4b5-4414-8251-a56504edef56" containerName="cinder-api-log"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.281693 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" containerName="proxy-httpd"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.281710 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" containerName="ceilometer-notification-agent"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.281737 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" containerName="sg-core"
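cpu_manager.go:410 and memory_manager.go:354 run RemoveStaleState when a new pod is admitted (the SyncLoop ADD just above): per-(podUID, container) resource assignments left behind by the departed pods are purged before the replacement pod is accounted. A simplified sketch; the real state lives in the kubelet's cm package:

    package main

    import "fmt"

    type key struct{ podUID, container string }

    // removeStaleState drops assignments whose pod UID is no longer active,
    // mirroring the cpu_manager/memory_manager entries above. Simplified types.
    func removeStaleState(assignments map[key]string, active map[string]bool) {
        for k := range assignments {
            if !active[k.podUID] {
                fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", k.podUID, k.container)
                delete(assignments, k)
            }
        }
    }

    func main() {
        state := map[key]string{
            {podUID: "a994ed9e", container: "cinder-api"}: "cpus 0-3",
            {podUID: "8df73ba5", container: "cinder-api"}: "cpus 0-3", // the replacement pod, still active
        }
        removeStaleState(state, map[string]bool{"8df73ba5": true})
    }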
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.281748 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a994ed9e-b4b5-4414-8251-a56504edef56" containerName="cinder-api"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.283223 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.297228 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.300153 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" (UID: "4cd6f3d2-cc8a-4156-b237-dc09b37a80d8"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.302619 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.302809 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.302909 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.344569 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" (UID: "4cd6f3d2-cc8a-4156-b237-dc09b37a80d8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.364501 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8df73ba5-ff8c-49a8-a923-2e7c957fb043-logs\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.364559 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8df73ba5-ff8c-49a8-a923-2e7c957fb043-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.364587 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8df73ba5-ff8c-49a8-a923-2e7c957fb043-config-data\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.364631 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8df73ba5-ff8c-49a8-a923-2e7c957fb043-config-data-custom\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.364690 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xf487\" (UniqueName: \"kubernetes.io/projected/8df73ba5-ff8c-49a8-a923-2e7c957fb043-kube-api-access-xf487\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.364704 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8df73ba5-ff8c-49a8-a923-2e7c957fb043-public-tls-certs\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.364774 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8df73ba5-ff8c-49a8-a923-2e7c957fb043-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.364790 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8df73ba5-ff8c-49a8-a923-2e7c957fb043-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.364836 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8df73ba5-ff8c-49a8-a923-2e7c957fb043-scripts\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.364888 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqqhl\" (UniqueName: \"kubernetes.io/projected/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-kube-api-access-tqqhl\") on node \"crc\" DevicePath \"\""
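The reflector.go:368 lines show the kubelet warming watch-based caches for the Secrets the new cinder-api-0 pod mounts before volume setup proceeds. The same warm-up pattern with the public client-go informer API (this is generic client-go usage, not the kubelet's internal secret manager):

    package main

    import (
        "time"

        "k8s.io/client-go/informers"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/rest"
        "k8s.io/client-go/tools/cache"
    )

    func main() {
        cfg, err := rest.InClusterConfig() // assumes in-cluster credentials
        if err != nil {
            panic(err)
        }
        client := kubernetes.NewForConfigOrDie(cfg)
        factory := informers.NewSharedInformerFactoryWithOptions(
            client, 10*time.Minute, informers.WithNamespace("openstack"))
        secrets := factory.Core().V1().Secrets().Informer()

        stop := make(chan struct{})
        defer close(stop)
        factory.Start(stop)
        // Block until the initial LIST+WATCH has populated the local cache,
        // mirroring the "Caches populated" log lines.
        cache.WaitForCacheSync(stop, secrets.HasSynced)
    }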
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.364899 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-scripts\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.364907 4755 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-run-httpd\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.364915 4755 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.364923 4755 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-log-httpd\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.364931 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.385334 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-config-data" (OuterVolumeSpecName: "config-data") pod "4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" (UID: "4cd6f3d2-cc8a-4156-b237-dc09b37a80d8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.467739 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8df73ba5-ff8c-49a8-a923-2e7c957fb043-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.467815 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8df73ba5-ff8c-49a8-a923-2e7c957fb043-config-data\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.467872 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8df73ba5-ff8c-49a8-a923-2e7c957fb043-config-data-custom\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.468035 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xf487\" (UniqueName: \"kubernetes.io/projected/8df73ba5-ff8c-49a8-a923-2e7c957fb043-kube-api-access-xf487\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.468057 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8df73ba5-ff8c-49a8-a923-2e7c957fb043-public-tls-certs\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.468131 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8df73ba5-ff8c-49a8-a923-2e7c957fb043-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.468155 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8df73ba5-ff8c-49a8-a923-2e7c957fb043-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.468282 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8df73ba5-ff8c-49a8-a923-2e7c957fb043-scripts\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.468356 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8df73ba5-ff8c-49a8-a923-2e7c957fb043-logs\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.468417 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8-config-data\") on node \"crc\" DevicePath \"\""
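Each MountVolume.SetUp that follows materializes a secret or projected volume as files under /var/lib/kubelet/pods/<podUID>/volumes/. A simplified stand-in for the publish step (the real kubelet uses a more careful symlink-swapping atomic writer; this rename-based version is only an approximation, with illustrative paths and payload):

    package main

    import (
        "os"
        "path/filepath"
    )

    // setUpSecret stages the payload in a temp dir and renames it into place so
    // a reader never observes a half-written volume.
    func setUpSecret(dest string, payload map[string][]byte) error {
        tmp, err := os.MkdirTemp(filepath.Dir(dest), ".stage-")
        if err != nil {
            return err
        }
        for name, data := range payload {
            if err := os.WriteFile(filepath.Join(tmp, name), data, 0o600); err != nil {
                return err
            }
        }
        return os.Rename(tmp, dest) // publish the whole payload at once
    }

    func main() {
        dest := filepath.Join(os.TempDir(), "demo~secret", "config-data")
        os.RemoveAll(dest)
        if err := os.MkdirAll(filepath.Dir(dest), 0o755); err != nil {
            panic(err)
        }
        if err := setUpSecret(dest, map[string][]byte{"cinder.conf": []byte("[DEFAULT]\n")}); err != nil {
            panic(err)
        }
    }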
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.468777 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8df73ba5-ff8c-49a8-a923-2e7c957fb043-logs\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.469752 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8df73ba5-ff8c-49a8-a923-2e7c957fb043-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.472709 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8df73ba5-ff8c-49a8-a923-2e7c957fb043-public-tls-certs\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.473082 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8df73ba5-ff8c-49a8-a923-2e7c957fb043-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.473161 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8df73ba5-ff8c-49a8-a923-2e7c957fb043-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.473200 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8df73ba5-ff8c-49a8-a923-2e7c957fb043-scripts\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.476688 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8df73ba5-ff8c-49a8-a923-2e7c957fb043-config-data-custom\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.476968 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8df73ba5-ff8c-49a8-a923-2e7c957fb043-config-data\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.485652 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xf487\" (UniqueName: \"kubernetes.io/projected/8df73ba5-ff8c-49a8-a923-2e7c957fb043-kube-api-access-xf487\") pod \"cinder-api-0\" (UID: \"8df73ba5-ff8c-49a8-a923-2e7c957fb043\") " pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.604207 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Feb 02 22:54:38 crc kubenswrapper[4755]: I0202 22:54:38.972327 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-66999cb498-vs49f"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.080788 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26b37ea7-b9d1-4db2-976b-8d40431c46e2-logs\") pod \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\" (UID: \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\") "
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.080835 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26b37ea7-b9d1-4db2-976b-8d40431c46e2-config-data\") pod \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\" (UID: \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\") "
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.080992 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2b24k\" (UniqueName: \"kubernetes.io/projected/26b37ea7-b9d1-4db2-976b-8d40431c46e2-kube-api-access-2b24k\") pod \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\" (UID: \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\") "
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.081084 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26b37ea7-b9d1-4db2-976b-8d40431c46e2-combined-ca-bundle\") pod \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\" (UID: \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\") "
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.081182 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/26b37ea7-b9d1-4db2-976b-8d40431c46e2-config-data-custom\") pod \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\" (UID: \"26b37ea7-b9d1-4db2-976b-8d40431c46e2\") "
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.081563 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26b37ea7-b9d1-4db2-976b-8d40431c46e2-logs" (OuterVolumeSpecName: "logs") pod "26b37ea7-b9d1-4db2-976b-8d40431c46e2" (UID: "26b37ea7-b9d1-4db2-976b-8d40431c46e2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.086433 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26b37ea7-b9d1-4db2-976b-8d40431c46e2-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "26b37ea7-b9d1-4db2-976b-8d40431c46e2" (UID: "26b37ea7-b9d1-4db2-976b-8d40431c46e2"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.086943 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26b37ea7-b9d1-4db2-976b-8d40431c46e2-kube-api-access-2b24k" (OuterVolumeSpecName: "kube-api-access-2b24k") pod "26b37ea7-b9d1-4db2-976b-8d40431c46e2" (UID: "26b37ea7-b9d1-4db2-976b-8d40431c46e2"). InnerVolumeSpecName "kube-api-access-2b24k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.092554 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a994ed9e-b4b5-4414-8251-a56504edef56" path="/var/lib/kubelet/pods/a994ed9e-b4b5-4414-8251-a56504edef56/volumes"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.151941 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26b37ea7-b9d1-4db2-976b-8d40431c46e2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "26b37ea7-b9d1-4db2-976b-8d40431c46e2" (UID: "26b37ea7-b9d1-4db2-976b-8d40431c46e2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.172265 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-96d6dcbbf-js6bv"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.179785 4755 generic.go:334] "Generic (PLEG): container finished" podID="dd8f8c90-543e-4125-9a9b-8c33e75c75ca" containerID="b78c8e6a497ccc37d0e11270409e7a0aab7e4315d3b7ea3284b87340b6a379c5" exitCode=0
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.179824 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-96d6dcbbf-js6bv"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.179855 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-96d6dcbbf-js6bv" event={"ID":"dd8f8c90-543e-4125-9a9b-8c33e75c75ca","Type":"ContainerDied","Data":"b78c8e6a497ccc37d0e11270409e7a0aab7e4315d3b7ea3284b87340b6a379c5"}
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.179881 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-96d6dcbbf-js6bv" event={"ID":"dd8f8c90-543e-4125-9a9b-8c33e75c75ca","Type":"ContainerDied","Data":"c7a78759ba5bee3ec6248578d1f143b92bf1192ae87a9d85e5e0416d10432241"}
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.179898 4755 scope.go:117] "RemoveContainer" containerID="cd8dba955bdd475e250cf9ac3b118aec4b6716623e5bb9820f4b0d1ce85596fc"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.184428 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2b24k\" (UniqueName: \"kubernetes.io/projected/26b37ea7-b9d1-4db2-976b-8d40431c46e2-kube-api-access-2b24k\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.184509 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26b37ea7-b9d1-4db2-976b-8d40431c46e2-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.184534 4755 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/26b37ea7-b9d1-4db2-976b-8d40431c46e2-config-data-custom\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.184548 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26b37ea7-b9d1-4db2-976b-8d40431c46e2-logs\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.190640 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4cd6f3d2-cc8a-4156-b237-dc09b37a80d8","Type":"ContainerDied","Data":"6ccfc9d528e12911b342e8cea0860a5123db5d641773654b47bdfb7ada2cb939"}
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.190799 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.213088 4755 generic.go:334] "Generic (PLEG): container finished" podID="26b37ea7-b9d1-4db2-976b-8d40431c46e2" containerID="a8e555a10cddffc2a26ace3dd724cfaa5c321dc6b36d460b77d3e176effd042a" exitCode=0
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.217907 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-66999cb498-vs49f"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.218417 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-66999cb498-vs49f" event={"ID":"26b37ea7-b9d1-4db2-976b-8d40431c46e2","Type":"ContainerDied","Data":"a8e555a10cddffc2a26ace3dd724cfaa5c321dc6b36d460b77d3e176effd042a"}
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.218450 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-66999cb498-vs49f" event={"ID":"26b37ea7-b9d1-4db2-976b-8d40431c46e2","Type":"ContainerDied","Data":"2fd05ce15bb5124f452435130890773bea33dd305f52396efb4b43eb0cf0fb2a"}
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.220970 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26b37ea7-b9d1-4db2-976b-8d40431c46e2-config-data" (OuterVolumeSpecName: "config-data") pod "26b37ea7-b9d1-4db2-976b-8d40431c46e2" (UID: "26b37ea7-b9d1-4db2-976b-8d40431c46e2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.231247 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.236189 4755 scope.go:117] "RemoveContainer" containerID="b78c8e6a497ccc37d0e11270409e7a0aab7e4315d3b7ea3284b87340b6a379c5"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.256581 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.268754 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Feb 02 22:54:39 crc kubenswrapper[4755]: E0202 22:54:39.269202 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd8f8c90-543e-4125-9a9b-8c33e75c75ca" containerName="neutron-api"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.269220 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd8f8c90-543e-4125-9a9b-8c33e75c75ca" containerName="neutron-api"
Feb 02 22:54:39 crc kubenswrapper[4755]: E0202 22:54:39.269249 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26b37ea7-b9d1-4db2-976b-8d40431c46e2" containerName="barbican-api"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.269255 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="26b37ea7-b9d1-4db2-976b-8d40431c46e2" containerName="barbican-api"
Feb 02 22:54:39 crc kubenswrapper[4755]: E0202 22:54:39.269264 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd8f8c90-543e-4125-9a9b-8c33e75c75ca" containerName="neutron-httpd"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.269269 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd8f8c90-543e-4125-9a9b-8c33e75c75ca" containerName="neutron-httpd"
Feb 02 22:54:39 crc kubenswrapper[4755]: E0202 22:54:39.269288 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26b37ea7-b9d1-4db2-976b-8d40431c46e2" containerName="barbican-api-log"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.269296 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="26b37ea7-b9d1-4db2-976b-8d40431c46e2" containerName="barbican-api-log"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.269483 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="26b37ea7-b9d1-4db2-976b-8d40431c46e2" containerName="barbican-api-log"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.269500 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd8f8c90-543e-4125-9a9b-8c33e75c75ca" containerName="neutron-api"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.269510 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd8f8c90-543e-4125-9a9b-8c33e75c75ca" containerName="neutron-httpd"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.269517 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="26b37ea7-b9d1-4db2-976b-8d40431c46e2" containerName="barbican-api"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.271378 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.274537 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.274848 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.282443 4755 scope.go:117] "RemoveContainer" containerID="cd8dba955bdd475e250cf9ac3b118aec4b6716623e5bb9820f4b0d1ce85596fc"
Feb 02 22:54:39 crc kubenswrapper[4755]: E0202 22:54:39.283978 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd8dba955bdd475e250cf9ac3b118aec4b6716623e5bb9820f4b0d1ce85596fc\": container with ID starting with cd8dba955bdd475e250cf9ac3b118aec4b6716623e5bb9820f4b0d1ce85596fc not found: ID does not exist" containerID="cd8dba955bdd475e250cf9ac3b118aec4b6716623e5bb9820f4b0d1ce85596fc"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.284087 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd8dba955bdd475e250cf9ac3b118aec4b6716623e5bb9820f4b0d1ce85596fc"} err="failed to get container status \"cd8dba955bdd475e250cf9ac3b118aec4b6716623e5bb9820f4b0d1ce85596fc\": rpc error: code = NotFound desc = could not find container \"cd8dba955bdd475e250cf9ac3b118aec4b6716623e5bb9820f4b0d1ce85596fc\": container with ID starting with cd8dba955bdd475e250cf9ac3b118aec4b6716623e5bb9820f4b0d1ce85596fc not found: ID does not exist"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.284109 4755 scope.go:117] "RemoveContainer" containerID="b78c8e6a497ccc37d0e11270409e7a0aab7e4315d3b7ea3284b87340b6a379c5"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.285470 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-internal-tls-certs\") pod \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") "
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.285544 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-combined-ca-bundle\") pod \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") "
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.285585 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jtz9\" (UniqueName: \"kubernetes.io/projected/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-kube-api-access-7jtz9\") pod \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") "
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.285631 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-config\") pod \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") "
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.285654 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-ovndb-tls-certs\") pod \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") "
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.285692 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-httpd-config\") pod \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") "
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.285743 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-public-tls-certs\") pod \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") "
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.286717 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26b37ea7-b9d1-4db2-976b-8d40431c46e2-config-data\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:39 crc kubenswrapper[4755]: E0202 22:54:39.296543 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b78c8e6a497ccc37d0e11270409e7a0aab7e4315d3b7ea3284b87340b6a379c5\": container with ID starting with b78c8e6a497ccc37d0e11270409e7a0aab7e4315d3b7ea3284b87340b6a379c5 not found: ID does not exist" containerID="b78c8e6a497ccc37d0e11270409e7a0aab7e4315d3b7ea3284b87340b6a379c5"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.296594 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b78c8e6a497ccc37d0e11270409e7a0aab7e4315d3b7ea3284b87340b6a379c5"} err="failed to get container status \"b78c8e6a497ccc37d0e11270409e7a0aab7e4315d3b7ea3284b87340b6a379c5\": rpc error: code = NotFound desc = could not find container \"b78c8e6a497ccc37d0e11270409e7a0aab7e4315d3b7ea3284b87340b6a379c5\": container with ID starting with b78c8e6a497ccc37d0e11270409e7a0aab7e4315d3b7ea3284b87340b6a379c5 not found: ID does not exist"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.296622 4755 scope.go:117] "RemoveContainer" containerID="90ec1d35f62e555c0973e2d8316b13e9340ac28862a8ca96a9a390e9b5a732b4"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.296949 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.309701 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "dd8f8c90-543e-4125-9a9b-8c33e75c75ca" (UID: "dd8f8c90-543e-4125-9a9b-8c33e75c75ca"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.315799 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-kube-api-access-7jtz9" (OuterVolumeSpecName: "kube-api-access-7jtz9") pod "dd8f8c90-543e-4125-9a9b-8c33e75c75ca" (UID: "dd8f8c90-543e-4125-9a9b-8c33e75c75ca"). InnerVolumeSpecName "kube-api-access-7jtz9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.333011 4755 scope.go:117] "RemoveContainer" containerID="d051e8e995cd5167ecced02fb28ad36c7b2120e447b81b7cdfd7b9d886646cbe"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.338332 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.354345 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "dd8f8c90-543e-4125-9a9b-8c33e75c75ca" (UID: "dd8f8c90-543e-4125-9a9b-8c33e75c75ca"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.356304 4755 scope.go:117] "RemoveContainer" containerID="1dc6513b3c0184af868a6b39a049b284d48393cdab9d92a1ff4976d21747c80a"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.357787 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-config" (OuterVolumeSpecName: "config") pod "dd8f8c90-543e-4125-9a9b-8c33e75c75ca" (UID: "dd8f8c90-543e-4125-9a9b-8c33e75c75ca"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.379185 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "dd8f8c90-543e-4125-9a9b-8c33e75c75ca" (UID: "dd8f8c90-543e-4125-9a9b-8c33e75c75ca"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.381649 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dd8f8c90-543e-4125-9a9b-8c33e75c75ca" (UID: "dd8f8c90-543e-4125-9a9b-8c33e75c75ca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.387748 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "dd8f8c90-543e-4125-9a9b-8c33e75c75ca" (UID: "dd8f8c90-543e-4125-9a9b-8c33e75c75ca"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.388288 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-ovndb-tls-certs\") pod \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\" (UID: \"dd8f8c90-543e-4125-9a9b-8c33e75c75ca\") "
Feb 02 22:54:39 crc kubenswrapper[4755]: W0202 22:54:39.388401 4755 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/dd8f8c90-543e-4125-9a9b-8c33e75c75ca/volumes/kubernetes.io~secret/ovndb-tls-certs
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.388470 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "dd8f8c90-543e-4125-9a9b-8c33e75c75ca" (UID: "dd8f8c90-543e-4125-9a9b-8c33e75c75ca"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.388850 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/719a114e-7ced-4950-9bce-749ce1431af8-run-httpd\") pod \"ceilometer-0\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") " pod="openstack/ceilometer-0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.388902 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") " pod="openstack/ceilometer-0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.388950 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") " pod="openstack/ceilometer-0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.389019 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-config-data\") pod \"ceilometer-0\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") " pod="openstack/ceilometer-0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.389043 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tl542\" (UniqueName: \"kubernetes.io/projected/719a114e-7ced-4950-9bce-749ce1431af8-kube-api-access-tl542\") pod \"ceilometer-0\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") " pod="openstack/ceilometer-0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.389114 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.389114 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/719a114e-7ced-4950-9bce-749ce1431af8-log-httpd\") pod \"ceilometer-0\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") " pod="openstack/ceilometer-0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.389139 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-scripts\") pod \"ceilometer-0\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") " pod="openstack/ceilometer-0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.389185 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.389197 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jtz9\" (UniqueName: \"kubernetes.io/projected/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-kube-api-access-7jtz9\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.389208 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-config\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.389216 4755 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-ovndb-tls-certs\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.389225 4755 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-httpd-config\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.389233 4755 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-public-tls-certs\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.389241 4755 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd8f8c90-543e-4125-9a9b-8c33e75c75ca-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.391418 4755 scope.go:117] "RemoveContainer" containerID="52bac8d44a3fc4485ca16814830e3f2bdab966bbab186b22e4a119fc492f0fec"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.411846 4755 scope.go:117] "RemoveContainer" containerID="a8e555a10cddffc2a26ace3dd724cfaa5c321dc6b36d460b77d3e176effd042a"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.433256 4755 scope.go:117] "RemoveContainer" containerID="a24ab594892b4fcc80d70e06b80c1a35dacad0d4ae7d18eef01b7223e8f40ed0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.459446 4755 scope.go:117] "RemoveContainer" containerID="a8e555a10cddffc2a26ace3dd724cfaa5c321dc6b36d460b77d3e176effd042a"
Feb 02 22:54:39 crc kubenswrapper[4755]: E0202 22:54:39.471103 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8e555a10cddffc2a26ace3dd724cfaa5c321dc6b36d460b77d3e176effd042a\": container with ID starting with a8e555a10cddffc2a26ace3dd724cfaa5c321dc6b36d460b77d3e176effd042a not found: ID does not exist" containerID="a8e555a10cddffc2a26ace3dd724cfaa5c321dc6b36d460b77d3e176effd042a"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.471138 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8e555a10cddffc2a26ace3dd724cfaa5c321dc6b36d460b77d3e176effd042a"} err="failed to get container status \"a8e555a10cddffc2a26ace3dd724cfaa5c321dc6b36d460b77d3e176effd042a\": rpc error: code = NotFound desc = could not find container \"a8e555a10cddffc2a26ace3dd724cfaa5c321dc6b36d460b77d3e176effd042a\": container with ID starting with a8e555a10cddffc2a26ace3dd724cfaa5c321dc6b36d460b77d3e176effd042a not found: ID does not exist"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.471165 4755 scope.go:117] "RemoveContainer" containerID="a24ab594892b4fcc80d70e06b80c1a35dacad0d4ae7d18eef01b7223e8f40ed0"
Feb 02 22:54:39 crc kubenswrapper[4755]: E0202 22:54:39.471894 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a24ab594892b4fcc80d70e06b80c1a35dacad0d4ae7d18eef01b7223e8f40ed0\": container with ID starting with a24ab594892b4fcc80d70e06b80c1a35dacad0d4ae7d18eef01b7223e8f40ed0 not found: ID does not exist" containerID="a24ab594892b4fcc80d70e06b80c1a35dacad0d4ae7d18eef01b7223e8f40ed0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.471923 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a24ab594892b4fcc80d70e06b80c1a35dacad0d4ae7d18eef01b7223e8f40ed0"} err="failed to get container status \"a24ab594892b4fcc80d70e06b80c1a35dacad0d4ae7d18eef01b7223e8f40ed0\": rpc error: code = NotFound desc = could not find container \"a24ab594892b4fcc80d70e06b80c1a35dacad0d4ae7d18eef01b7223e8f40ed0\": container with ID starting with a24ab594892b4fcc80d70e06b80c1a35dacad0d4ae7d18eef01b7223e8f40ed0 not found: ID does not exist"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.491351 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") " pod="openstack/ceilometer-0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.491414 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-config-data\") pod \"ceilometer-0\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") " pod="openstack/ceilometer-0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.491446 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tl542\" (UniqueName: \"kubernetes.io/projected/719a114e-7ced-4950-9bce-749ce1431af8-kube-api-access-tl542\") pod \"ceilometer-0\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") " pod="openstack/ceilometer-0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.491527 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/719a114e-7ced-4950-9bce-749ce1431af8-log-httpd\") pod \"ceilometer-0\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") " pod="openstack/ceilometer-0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.491551 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-scripts\") pod \"ceilometer-0\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") " pod="openstack/ceilometer-0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.491573 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/719a114e-7ced-4950-9bce-749ce1431af8-run-httpd\") pod \"ceilometer-0\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") " pod="openstack/ceilometer-0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.491610 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") " pod="openstack/ceilometer-0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.492211 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/719a114e-7ced-4950-9bce-749ce1431af8-run-httpd\") pod \"ceilometer-0\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") " pod="openstack/ceilometer-0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.492246 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/719a114e-7ced-4950-9bce-749ce1431af8-log-httpd\") pod \"ceilometer-0\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") " pod="openstack/ceilometer-0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.495592 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") " pod="openstack/ceilometer-0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.495823 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-config-data\") pod \"ceilometer-0\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") " pod="openstack/ceilometer-0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.509306 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") " pod="openstack/ceilometer-0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.512158 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tl542\" (UniqueName: \"kubernetes.io/projected/719a114e-7ced-4950-9bce-749ce1431af8-kube-api-access-tl542\") pod \"ceilometer-0\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") " pod="openstack/ceilometer-0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.513299 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-scripts\") pod \"ceilometer-0\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") " pod="openstack/ceilometer-0"
Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.514808 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-96d6dcbbf-js6bv"]
REMOVE" source="api" pods=["openstack/neutron-96d6dcbbf-js6bv"] Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.556794 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-66999cb498-vs49f"] Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.565238 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-66999cb498-vs49f"] Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.586842 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.596071 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-pnrv4" Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.702003 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwj54\" (UniqueName: \"kubernetes.io/projected/43476281-d17e-4f36-8934-215e34e77ac6-kube-api-access-zwj54\") pod \"43476281-d17e-4f36-8934-215e34e77ac6\" (UID: \"43476281-d17e-4f36-8934-215e34e77ac6\") " Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.702280 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43476281-d17e-4f36-8934-215e34e77ac6-config-data\") pod \"43476281-d17e-4f36-8934-215e34e77ac6\" (UID: \"43476281-d17e-4f36-8934-215e34e77ac6\") " Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.702382 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43476281-d17e-4f36-8934-215e34e77ac6-combined-ca-bundle\") pod \"43476281-d17e-4f36-8934-215e34e77ac6\" (UID: \"43476281-d17e-4f36-8934-215e34e77ac6\") " Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.702500 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43476281-d17e-4f36-8934-215e34e77ac6-scripts\") pod \"43476281-d17e-4f36-8934-215e34e77ac6\" (UID: \"43476281-d17e-4f36-8934-215e34e77ac6\") " Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.702524 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/43476281-d17e-4f36-8934-215e34e77ac6-certs\") pod \"43476281-d17e-4f36-8934-215e34e77ac6\" (UID: \"43476281-d17e-4f36-8934-215e34e77ac6\") " Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.707092 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43476281-d17e-4f36-8934-215e34e77ac6-certs" (OuterVolumeSpecName: "certs") pod "43476281-d17e-4f36-8934-215e34e77ac6" (UID: "43476281-d17e-4f36-8934-215e34e77ac6"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.708854 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43476281-d17e-4f36-8934-215e34e77ac6-scripts" (OuterVolumeSpecName: "scripts") pod "43476281-d17e-4f36-8934-215e34e77ac6" (UID: "43476281-d17e-4f36-8934-215e34e77ac6"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.709299 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43476281-d17e-4f36-8934-215e34e77ac6-kube-api-access-zwj54" (OuterVolumeSpecName: "kube-api-access-zwj54") pod "43476281-d17e-4f36-8934-215e34e77ac6" (UID: "43476281-d17e-4f36-8934-215e34e77ac6"). InnerVolumeSpecName "kube-api-access-zwj54". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.734790 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43476281-d17e-4f36-8934-215e34e77ac6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "43476281-d17e-4f36-8934-215e34e77ac6" (UID: "43476281-d17e-4f36-8934-215e34e77ac6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.748296 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43476281-d17e-4f36-8934-215e34e77ac6-config-data" (OuterVolumeSpecName: "config-data") pod "43476281-d17e-4f36-8934-215e34e77ac6" (UID: "43476281-d17e-4f36-8934-215e34e77ac6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.814630 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43476281-d17e-4f36-8934-215e34e77ac6-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.814672 4755 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/43476281-d17e-4f36-8934-215e34e77ac6-certs\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.814682 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwj54\" (UniqueName: \"kubernetes.io/projected/43476281-d17e-4f36-8934-215e34e77ac6-kube-api-access-zwj54\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.814699 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43476281-d17e-4f36-8934-215e34e77ac6-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:39 crc kubenswrapper[4755]: I0202 22:54:39.814709 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43476281-d17e-4f36-8934-215e34e77ac6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.058523 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.255412 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"719a114e-7ced-4950-9bce-749ce1431af8","Type":"ContainerStarted","Data":"3e9a3625104568895fd1543787cdec5f042a74c06f1be3d2341b4ec5690ad374"} Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.260887 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-pnrv4" event={"ID":"43476281-d17e-4f36-8934-215e34e77ac6","Type":"ContainerDied","Data":"56ab41856473bbffc7fb43b20948e478e2c4e80718e12b9607c19631ccdf9ee6"} Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.260935 4755 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-pnrv4" Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.261049 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="56ab41856473bbffc7fb43b20948e478e2c4e80718e12b9607c19631ccdf9ee6" Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.266532 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8df73ba5-ff8c-49a8-a923-2e7c957fb043","Type":"ContainerStarted","Data":"4b2cca55ddbc3c4e67328d834e2b0e42ffc84293ec5c0c2c50ce3f63e42e179f"} Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.266589 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8df73ba5-ff8c-49a8-a923-2e7c957fb043","Type":"ContainerStarted","Data":"28d39583bcc6df36253435efa470a1627a60c7d353d3aea537211c73cd8c4680"} Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.426501 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-proc-0"] Feb 02 22:54:40 crc kubenswrapper[4755]: E0202 22:54:40.427157 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43476281-d17e-4f36-8934-215e34e77ac6" containerName="cloudkitty-storageinit" Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.427176 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="43476281-d17e-4f36-8934-215e34e77ac6" containerName="cloudkitty-storageinit" Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.427372 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="43476281-d17e-4f36-8934-215e34e77ac6" containerName="cloudkitty-storageinit" Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.428159 4755 util.go:30] "No sandbox for pod can be found. 
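The "SyncLoop (PLEG): event for pod" entries (kubelet.go:2453) are the pod lifecycle event generator relaying ContainerStarted/ContainerDied observations from the runtime back into the sync loop. When several pods churn at once, as here, a per-pod tally of event types makes the burst readable; a small sketch over the same assumed kubelet.log:

import re
from collections import Counter

# Pull the pod name and the PLEG event type out of each entry.
PLEG = re.compile(r'event for pod" pod="([^"]+)" event=.*"Type":"(\w+)"')

counts = Counter()
with open("kubelet.log") as log:   # assumed path
    for entry in log:
        m = PLEG.search(entry)
        if m:
            counts[(m.group(1), m.group(2))] += 1

for (pod, event_type), n in counts.most_common():
    print(f"{pod}\t{event_type}\t{n}")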
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.428159 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.449502 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.449555 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.449762 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-proc-config-data"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.449963 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-6xjk4"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.450138 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.468508 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"]
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.512786 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-c2jt2"]
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.513047 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" podUID="a6a7c105-7184-419e-a061-819d29a9a7c2" containerName="dnsmasq-dns" containerID="cri-o://d0ad6d3e3f03f7d5cb6a13a559c3e0d6e077c6f75afc740b366feb438f5c25ef" gracePeriod=10
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.519935 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6578955fd5-c2jt2"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.531885 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-scripts\") pod \"cloudkitty-proc-0\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.531924 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.531992 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-config-data\") pod \"cloudkitty-proc-0\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.532053 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqpd7\" (UniqueName: \"kubernetes.io/projected/184566af-1588-4a8c-9b65-3de6f1bda382-kube-api-access-pqpd7\") pod \"cloudkitty-proc-0\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.532071 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.532119 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/184566af-1588-4a8c-9b65-3de6f1bda382-certs\") pod \"cloudkitty-proc-0\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.609675 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-58bd69657f-l4lch"]
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.611516 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58bd69657f-l4lch"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.634070 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqpd7\" (UniqueName: \"kubernetes.io/projected/184566af-1588-4a8c-9b65-3de6f1bda382-kube-api-access-pqpd7\") pod \"cloudkitty-proc-0\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.634124 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.634198 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/184566af-1588-4a8c-9b65-3de6f1bda382-certs\") pod \"cloudkitty-proc-0\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.634244 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-ovsdbserver-sb\") pod \"dnsmasq-dns-58bd69657f-l4lch\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " pod="openstack/dnsmasq-dns-58bd69657f-l4lch"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.634285 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-ovsdbserver-nb\") pod \"dnsmasq-dns-58bd69657f-l4lch\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " pod="openstack/dnsmasq-dns-58bd69657f-l4lch"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.634314 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-scripts\") pod \"cloudkitty-proc-0\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.634336 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.634406 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-dns-svc\") pod \"dnsmasq-dns-58bd69657f-l4lch\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " pod="openstack/dnsmasq-dns-58bd69657f-l4lch"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.634431 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fx2xw\" (UniqueName: \"kubernetes.io/projected/344ddf7e-967c-4278-90f8-92951d46fa13-kube-api-access-fx2xw\") pod \"dnsmasq-dns-58bd69657f-l4lch\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " pod="openstack/dnsmasq-dns-58bd69657f-l4lch"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.634452 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-config-data\") pod \"cloudkitty-proc-0\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.634476 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-config\") pod \"dnsmasq-dns-58bd69657f-l4lch\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " pod="openstack/dnsmasq-dns-58bd69657f-l4lch"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.634551 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-dns-swift-storage-0\") pod \"dnsmasq-dns-58bd69657f-l4lch\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " pod="openstack/dnsmasq-dns-58bd69657f-l4lch"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.649327 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.651793 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-config-data\") pod \"cloudkitty-proc-0\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.659197 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-scripts\") pod \"cloudkitty-proc-0\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.660436 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.666082 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58bd69657f-l4lch"]
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.679781 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/184566af-1588-4a8c-9b65-3de6f1bda382-certs\") pod \"cloudkitty-proc-0\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.739472 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-dns-svc\") pod \"dnsmasq-dns-58bd69657f-l4lch\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " pod="openstack/dnsmasq-dns-58bd69657f-l4lch"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.739523 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fx2xw\" (UniqueName: \"kubernetes.io/projected/344ddf7e-967c-4278-90f8-92951d46fa13-kube-api-access-fx2xw\") pod \"dnsmasq-dns-58bd69657f-l4lch\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " pod="openstack/dnsmasq-dns-58bd69657f-l4lch"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.739553 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-config\") pod \"dnsmasq-dns-58bd69657f-l4lch\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " pod="openstack/dnsmasq-dns-58bd69657f-l4lch"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.739605 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-dns-swift-storage-0\") pod \"dnsmasq-dns-58bd69657f-l4lch\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " pod="openstack/dnsmasq-dns-58bd69657f-l4lch"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.740411 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqpd7\" (UniqueName: \"kubernetes.io/projected/184566af-1588-4a8c-9b65-3de6f1bda382-kube-api-access-pqpd7\") pod \"cloudkitty-proc-0\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.740878 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-ovsdbserver-sb\") pod \"dnsmasq-dns-58bd69657f-l4lch\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " pod="openstack/dnsmasq-dns-58bd69657f-l4lch"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.740938 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-ovsdbserver-nb\") pod \"dnsmasq-dns-58bd69657f-l4lch\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " pod="openstack/dnsmasq-dns-58bd69657f-l4lch"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.742004 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-ovsdbserver-nb\") pod \"dnsmasq-dns-58bd69657f-l4lch\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " pod="openstack/dnsmasq-dns-58bd69657f-l4lch"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.742521 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-dns-swift-storage-0\") pod \"dnsmasq-dns-58bd69657f-l4lch\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " pod="openstack/dnsmasq-dns-58bd69657f-l4lch"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.743363 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-dns-svc\") pod \"dnsmasq-dns-58bd69657f-l4lch\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " pod="openstack/dnsmasq-dns-58bd69657f-l4lch"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.744526 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-ovsdbserver-sb\") pod \"dnsmasq-dns-58bd69657f-l4lch\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " pod="openstack/dnsmasq-dns-58bd69657f-l4lch"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.749148 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-config\") pod \"dnsmasq-dns-58bd69657f-l4lch\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " pod="openstack/dnsmasq-dns-58bd69657f-l4lch"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.777378 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fx2xw\" (UniqueName: \"kubernetes.io/projected/344ddf7e-967c-4278-90f8-92951d46fa13-kube-api-access-fx2xw\") pod \"dnsmasq-dns-58bd69657f-l4lch\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " pod="openstack/dnsmasq-dns-58bd69657f-l4lch"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.788202 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.896224 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-api-0"]
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.902492 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.906985 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-api-config-data"
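On the mount side the pattern mirrors the unmount flow: "VerifyControllerAttachedVolume started" (reconciler_common.go:245), then "MountVolume started" (reconciler_common.go:218), then "MountVolume.SetUp succeeded" (operation_generator.go:637), as seen for cloudkitty-proc-0 and dnsmasq-dns-58bd69657f-l4lch above. A sketch, under the same kubelet.log assumption, that flags any (pod, volume) pair which never reports SetUp success:

import re
from collections import defaultdict

# Volume name sits inside \"...\" escapes; the pod name is the final
# unescaped pod="..." field on the entry.
vol_rx = re.compile(r'for volume \\"([^\\]+)\\".*" pod="([^"]+)"')

state = defaultdict(set)
with open("kubelet.log") as log:   # assumed path
    for entry in log:
        if "VerifyControllerAttachedVolume started" in entry:
            phase = "attached"
        elif "MountVolume started" in entry:
            phase = "mount-started"
        elif "MountVolume.SetUp succeeded" in entry:
            phase = "mounted"
        else:
            continue
        m = vol_rx.search(entry)
        if m:
            state[(m.group(2), m.group(1))].add(phase)

for (pod, vol), phases in sorted(state.items()):
    if "mounted" not in phases:
        print("incomplete:", pod, vol, sorted(phases))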
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.943377 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58bd69657f-l4lch"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.945419 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"]
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.945889 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.945930 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.945973 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-config-data\") pod \"cloudkitty-api-0\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.946004 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5aae111c-2d5d-4b70-84ac-43d6db536b7b-logs\") pod \"cloudkitty-api-0\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.946035 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9h58\" (UniqueName: \"kubernetes.io/projected/5aae111c-2d5d-4b70-84ac-43d6db536b7b-kube-api-access-c9h58\") pod \"cloudkitty-api-0\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.946329 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-scripts\") pod \"cloudkitty-api-0\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:40 crc kubenswrapper[4755]: I0202 22:54:40.946415 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/5aae111c-2d5d-4b70-84ac-43d6db536b7b-certs\") pod \"cloudkitty-api-0\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.048372 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.049280 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.049621 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-config-data\") pod \"cloudkitty-api-0\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.049666 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5aae111c-2d5d-4b70-84ac-43d6db536b7b-logs\") pod \"cloudkitty-api-0\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.049700 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9h58\" (UniqueName: \"kubernetes.io/projected/5aae111c-2d5d-4b70-84ac-43d6db536b7b-kube-api-access-c9h58\") pod \"cloudkitty-api-0\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.052083 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-scripts\") pod \"cloudkitty-api-0\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.052161 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/5aae111c-2d5d-4b70-84ac-43d6db536b7b-certs\") pod \"cloudkitty-api-0\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.053591 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5aae111c-2d5d-4b70-84ac-43d6db536b7b-logs\") pod \"cloudkitty-api-0\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.055705 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-config-data\") pod \"cloudkitty-api-0\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.059289 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.059384 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.066063 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-scripts\") pod \"cloudkitty-api-0\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.075048 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/5aae111c-2d5d-4b70-84ac-43d6db536b7b-certs\") pod \"cloudkitty-api-0\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.099808 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9h58\" (UniqueName: \"kubernetes.io/projected/5aae111c-2d5d-4b70-84ac-43d6db536b7b-kube-api-access-c9h58\") pod \"cloudkitty-api-0\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.104602 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26b37ea7-b9d1-4db2-976b-8d40431c46e2" path="/var/lib/kubelet/pods/26b37ea7-b9d1-4db2-976b-8d40431c46e2/volumes"
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.105791 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4cd6f3d2-cc8a-4156-b237-dc09b37a80d8" path="/var/lib/kubelet/pods/4cd6f3d2-cc8a-4156-b237-dc09b37a80d8/volumes"
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.107203 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd8f8c90-543e-4125-9a9b-8c33e75c75ca" path="/var/lib/kubelet/pods/dd8f8c90-543e-4125-9a9b-8c33e75c75ca/volumes"
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.279538 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0"
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.307256 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"719a114e-7ced-4950-9bce-749ce1431af8","Type":"ContainerStarted","Data":"40543d008b0bb18e4894c6386b05a4baf60b9b181756b20c142fe801819c54bc"}
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.308797 4755 generic.go:334] "Generic (PLEG): container finished" podID="a6a7c105-7184-419e-a061-819d29a9a7c2" containerID="d0ad6d3e3f03f7d5cb6a13a559c3e0d6e077c6f75afc740b366feb438f5c25ef" exitCode=0
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.308844 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" event={"ID":"a6a7c105-7184-419e-a061-819d29a9a7c2","Type":"ContainerDied","Data":"d0ad6d3e3f03f7d5cb6a13a559c3e0d6e077c6f75afc740b366feb438f5c25ef"}
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.313845 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.335260 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-c2jt2"
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.340585 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.340567749 podStartE2EDuration="3.340567749s" podCreationTimestamp="2026-02-02 22:54:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:41.333276874 +0000 UTC m=+1237.024497200" watchObservedRunningTime="2026-02-02 22:54:41.340567749 +0000 UTC m=+1237.031788075"
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.415452 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-dns-svc\") pod \"a6a7c105-7184-419e-a061-819d29a9a7c2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") "
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.415537 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rfv44\" (UniqueName: \"kubernetes.io/projected/a6a7c105-7184-419e-a061-819d29a9a7c2-kube-api-access-rfv44\") pod \"a6a7c105-7184-419e-a061-819d29a9a7c2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") "
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.415622 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-ovsdbserver-sb\") pod \"a6a7c105-7184-419e-a061-819d29a9a7c2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") "
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.415662 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-ovsdbserver-nb\") pod \"a6a7c105-7184-419e-a061-819d29a9a7c2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") "
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.415681 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-config\") pod \"a6a7c105-7184-419e-a061-819d29a9a7c2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") "
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.415770 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-dns-swift-storage-0\") pod \"a6a7c105-7184-419e-a061-819d29a9a7c2\" (UID: \"a6a7c105-7184-419e-a061-819d29a9a7c2\") "
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.428363 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6a7c105-7184-419e-a061-819d29a9a7c2-kube-api-access-rfv44" (OuterVolumeSpecName: "kube-api-access-rfv44") pod "a6a7c105-7184-419e-a061-819d29a9a7c2" (UID: "a6a7c105-7184-419e-a061-819d29a9a7c2"). InnerVolumeSpecName "kube-api-access-rfv44". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.467799 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-config" (OuterVolumeSpecName: "config") pod "a6a7c105-7184-419e-a061-819d29a9a7c2" (UID: "a6a7c105-7184-419e-a061-819d29a9a7c2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.492335 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a6a7c105-7184-419e-a061-819d29a9a7c2" (UID: "a6a7c105-7184-419e-a061-819d29a9a7c2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.524000 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rfv44\" (UniqueName: \"kubernetes.io/projected/a6a7c105-7184-419e-a061-819d29a9a7c2-kube-api-access-rfv44\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.524244 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.524256 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-config\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.525559 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a6a7c105-7184-419e-a061-819d29a9a7c2" (UID: "a6a7c105-7184-419e-a061-819d29a9a7c2"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.539585 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a6a7c105-7184-419e-a061-819d29a9a7c2" (UID: "a6a7c105-7184-419e-a061-819d29a9a7c2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.595585 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"]
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.626787 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a6a7c105-7184-419e-a061-819d29a9a7c2" (UID: "a6a7c105-7184-419e-a061-819d29a9a7c2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.628024 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.628038 4755 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.628051 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6a7c105-7184-419e-a061-819d29a9a7c2-dns-svc\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:41 crc kubenswrapper[4755]: I0202 22:54:41.903386 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58bd69657f-l4lch"]
Feb 02 22:54:42 crc kubenswrapper[4755]: I0202 22:54:42.054435 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"]
Feb 02 22:54:42 crc kubenswrapper[4755]: I0202 22:54:42.326113 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"719a114e-7ced-4950-9bce-749ce1431af8","Type":"ContainerStarted","Data":"17317f6e9977ef6b00563c3e374efc7d5d24c99fff0798c311ab9d82240af718"}
Feb 02 22:54:42 crc kubenswrapper[4755]: I0202 22:54:42.327123 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"184566af-1588-4a8c-9b65-3de6f1bda382","Type":"ContainerStarted","Data":"9c4509fca89c56ea87bf9af940e48e4796a91fe267bcdef23ef6de4b640bfb1d"}
Feb 02 22:54:42 crc kubenswrapper[4755]: I0202 22:54:42.334231 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-c2jt2" event={"ID":"a6a7c105-7184-419e-a061-819d29a9a7c2","Type":"ContainerDied","Data":"95127e4c3d59dbbf6dca7ec94b94d995bbff807dad2a343565797247ce509484"}
Feb 02 22:54:42 crc kubenswrapper[4755]: I0202 22:54:42.334278 4755 scope.go:117] "RemoveContainer" containerID="d0ad6d3e3f03f7d5cb6a13a559c3e0d6e077c6f75afc740b366feb438f5c25ef"
Feb 02 22:54:42 crc kubenswrapper[4755]: I0202 22:54:42.334421 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-c2jt2"
Feb 02 22:54:42 crc kubenswrapper[4755]: I0202 22:54:42.346523 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8df73ba5-ff8c-49a8-a923-2e7c957fb043","Type":"ContainerStarted","Data":"8bd4c5bd80c4fe1f95d5a703fe1c90dfc4c20daf916ff6a3e715f0874a8cf4b3"}
Feb 02 22:54:42 crc kubenswrapper[4755]: I0202 22:54:42.351588 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"5aae111c-2d5d-4b70-84ac-43d6db536b7b","Type":"ContainerStarted","Data":"de6038a615fc5e26be96210463c9b5386be5f4ad6e4b43b1f7131d2a13ed13f6"}
Feb 02 22:54:42 crc kubenswrapper[4755]: I0202 22:54:42.353218 4755 generic.go:334] "Generic (PLEG): container finished" podID="344ddf7e-967c-4278-90f8-92951d46fa13" containerID="8af799ed80b84a7df7829f0d5e65b3e44801f260658f387450104ef818096a9c" exitCode=0
Feb 02 22:54:42 crc kubenswrapper[4755]: I0202 22:54:42.353244 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58bd69657f-l4lch" event={"ID":"344ddf7e-967c-4278-90f8-92951d46fa13","Type":"ContainerDied","Data":"8af799ed80b84a7df7829f0d5e65b3e44801f260658f387450104ef818096a9c"}
Feb 02 22:54:42 crc kubenswrapper[4755]: I0202 22:54:42.353257 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58bd69657f-l4lch" event={"ID":"344ddf7e-967c-4278-90f8-92951d46fa13","Type":"ContainerStarted","Data":"ff05c844888d58ee4a1ef9ef60019f658f772f2e5623d0249fde2c44dee82cfe"}
Feb 02 22:54:42 crc kubenswrapper[4755]: I0202 22:54:42.394632 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-c2jt2"]
Feb 02 22:54:42 crc kubenswrapper[4755]: I0202 22:54:42.402222 4755 scope.go:117] "RemoveContainer" containerID="45a76b4b65074a1b235b20bd21f483db3a41efe6cb4c650dd636b69f783411a4"
Feb 02 22:54:42 crc kubenswrapper[4755]: I0202 22:54:42.406309 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-c2jt2"]
Feb 02 22:54:43 crc kubenswrapper[4755]: I0202 22:54:43.091795 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6a7c105-7184-419e-a061-819d29a9a7c2" path="/var/lib/kubelet/pods/a6a7c105-7184-419e-a061-819d29a9a7c2/volumes"
Feb 02 22:54:43 crc kubenswrapper[4755]: I0202 22:54:43.357040 4755 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod4f55b2a0-624c-46b1-bede-8cb15264838e"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod4f55b2a0-624c-46b1-bede-8cb15264838e] : Timed out while waiting for systemd to remove kubepods-besteffort-pod4f55b2a0_624c_46b1_bede_8cb15264838e.slice"
Feb 02 22:54:43 crc kubenswrapper[4755]: E0202 22:54:43.357334 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod4f55b2a0-624c-46b1-bede-8cb15264838e] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod4f55b2a0-624c-46b1-bede-8cb15264838e] : Timed out while waiting for systemd to remove kubepods-besteffort-pod4f55b2a0_624c_46b1_bede_8cb15264838e.slice" pod="openstack/keystone-bootstrap-55dvt" podUID="4f55b2a0-624c-46b1-bede-8cb15264838e"
Feb 02 22:54:43 crc kubenswrapper[4755]: I0202 22:54:43.383542 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0"
event={"ID":"5aae111c-2d5d-4b70-84ac-43d6db536b7b","Type":"ContainerStarted","Data":"54fc337f101506cfc583389830e1bf72fd47ddde58f8f154122c7b72bbc6f642"} Feb 02 22:54:43 crc kubenswrapper[4755]: I0202 22:54:43.383571 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"5aae111c-2d5d-4b70-84ac-43d6db536b7b","Type":"ContainerStarted","Data":"92e096dbfb96eafb896d7651bad9bdfc0c1f065cf14fe1e60476aced074ecf8f"} Feb 02 22:54:43 crc kubenswrapper[4755]: I0202 22:54:43.383955 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-api-0" Feb 02 22:54:43 crc kubenswrapper[4755]: I0202 22:54:43.419992 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-api-0" podStartSLOduration=3.41997185 podStartE2EDuration="3.41997185s" podCreationTimestamp="2026-02-02 22:54:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:43.408678923 +0000 UTC m=+1239.099899249" watchObservedRunningTime="2026-02-02 22:54:43.41997185 +0000 UTC m=+1239.111192176" Feb 02 22:54:43 crc kubenswrapper[4755]: I0202 22:54:43.580849 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Feb 02 22:54:43 crc kubenswrapper[4755]: I0202 22:54:43.629369 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 22:54:44 crc kubenswrapper[4755]: I0202 22:54:44.138496 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"] Feb 02 22:54:44 crc kubenswrapper[4755]: I0202 22:54:44.392653 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58bd69657f-l4lch" event={"ID":"344ddf7e-967c-4278-90f8-92951d46fa13","Type":"ContainerStarted","Data":"8bbbd80966d4299b6bfdf7603df94c32541b157ee7a997feac3dbae41d6e13bc"} Feb 02 22:54:44 crc kubenswrapper[4755]: I0202 22:54:44.392780 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-58bd69657f-l4lch" Feb 02 22:54:44 crc kubenswrapper[4755]: I0202 22:54:44.396578 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"719a114e-7ced-4950-9bce-749ce1431af8","Type":"ContainerStarted","Data":"575cfa105cdbe3bda469922da13e7a021eb5e7d085b3f06ecb7f43b073c73759"} Feb 02 22:54:44 crc kubenswrapper[4755]: I0202 22:54:44.397923 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"184566af-1588-4a8c-9b65-3de6f1bda382","Type":"ContainerStarted","Data":"8beb1e3e9d2f795529669d47d67273dae5dec918e82e668fb22c791522a68150"} Feb 02 22:54:44 crc kubenswrapper[4755]: I0202 22:54:44.397936 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-55dvt" Feb 02 22:54:44 crc kubenswrapper[4755]: I0202 22:54:44.398333 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="7eb8965b-8c6b-410a-9402-03e460f3dffc" containerName="cinder-scheduler" containerID="cri-o://ada66a31e9d9da44388b0ed882af5efae575c907f483202c0e54231f247f4d9b" gracePeriod=30 Feb 02 22:54:44 crc kubenswrapper[4755]: I0202 22:54:44.398455 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="7eb8965b-8c6b-410a-9402-03e460f3dffc" containerName="probe" containerID="cri-o://763952e928581b0aac101da50d23e86e5d2ffc3af29bdd9acaf803b7ca65c369" gracePeriod=30 Feb 02 22:54:44 crc kubenswrapper[4755]: I0202 22:54:44.419049 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-58bd69657f-l4lch" podStartSLOduration=4.419032963 podStartE2EDuration="4.419032963s" podCreationTimestamp="2026-02-02 22:54:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:44.412066878 +0000 UTC m=+1240.103287214" watchObservedRunningTime="2026-02-02 22:54:44.419032963 +0000 UTC m=+1240.110253289" Feb 02 22:54:44 crc kubenswrapper[4755]: I0202 22:54:44.438493 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-proc-0" podStartSLOduration=3.129676573 podStartE2EDuration="4.438476869s" podCreationTimestamp="2026-02-02 22:54:40 +0000 UTC" firstStartedPulling="2026-02-02 22:54:41.600839913 +0000 UTC m=+1237.292060229" lastFinishedPulling="2026-02-02 22:54:42.909640199 +0000 UTC m=+1238.600860525" observedRunningTime="2026-02-02 22:54:44.436308828 +0000 UTC m=+1240.127529164" watchObservedRunningTime="2026-02-02 22:54:44.438476869 +0000 UTC m=+1240.129697195" Feb 02 22:54:44 crc kubenswrapper[4755]: I0202 22:54:44.455806 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Feb 02 22:54:45 crc kubenswrapper[4755]: I0202 22:54:45.407325 4755 generic.go:334] "Generic (PLEG): container finished" podID="7eb8965b-8c6b-410a-9402-03e460f3dffc" containerID="763952e928581b0aac101da50d23e86e5d2ffc3af29bdd9acaf803b7ca65c369" exitCode=0 Feb 02 22:54:45 crc kubenswrapper[4755]: I0202 22:54:45.407420 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7eb8965b-8c6b-410a-9402-03e460f3dffc","Type":"ContainerDied","Data":"763952e928581b0aac101da50d23e86e5d2ffc3af29bdd9acaf803b7ca65c369"} Feb 02 22:54:45 crc kubenswrapper[4755]: I0202 22:54:45.407903 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="5aae111c-2d5d-4b70-84ac-43d6db536b7b" containerName="cloudkitty-api-log" containerID="cri-o://92e096dbfb96eafb896d7651bad9bdfc0c1f065cf14fe1e60476aced074ecf8f" gracePeriod=30 Feb 02 22:54:45 crc kubenswrapper[4755]: I0202 22:54:45.408343 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="5aae111c-2d5d-4b70-84ac-43d6db536b7b" containerName="cloudkitty-api" containerID="cri-o://54fc337f101506cfc583389830e1bf72fd47ddde58f8f154122c7b72bbc6f642" gracePeriod=30 Feb 02 22:54:45 crc kubenswrapper[4755]: I0202 22:54:45.554386 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-f565458cd-6bkv6" Feb 02 
Feb 02 22:54:45 crc kubenswrapper[4755]: I0202 22:54:45.668493 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5bdd8df796-zxkxp"
Feb 02 22:54:45 crc kubenswrapper[4755]: I0202 22:54:45.677631 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5bdd8df796-zxkxp"
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.216328 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-f565458cd-6bkv6"
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.291302 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-5bdd8df796-zxkxp"]
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.459897 4755 generic.go:334] "Generic (PLEG): container finished" podID="5aae111c-2d5d-4b70-84ac-43d6db536b7b" containerID="54fc337f101506cfc583389830e1bf72fd47ddde58f8f154122c7b72bbc6f642" exitCode=0
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.459924 4755 generic.go:334] "Generic (PLEG): container finished" podID="5aae111c-2d5d-4b70-84ac-43d6db536b7b" containerID="92e096dbfb96eafb896d7651bad9bdfc0c1f065cf14fe1e60476aced074ecf8f" exitCode=143
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.460011 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"5aae111c-2d5d-4b70-84ac-43d6db536b7b","Type":"ContainerDied","Data":"54fc337f101506cfc583389830e1bf72fd47ddde58f8f154122c7b72bbc6f642"}
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.460037 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"5aae111c-2d5d-4b70-84ac-43d6db536b7b","Type":"ContainerDied","Data":"92e096dbfb96eafb896d7651bad9bdfc0c1f065cf14fe1e60476aced074ecf8f"}
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.460097 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-proc-0" podUID="184566af-1588-4a8c-9b65-3de6f1bda382" containerName="cloudkitty-proc" containerID="cri-o://8beb1e3e9d2f795529669d47d67273dae5dec918e82e668fb22c791522a68150" gracePeriod=30
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.717024 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0"
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.751233 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9h58\" (UniqueName: \"kubernetes.io/projected/5aae111c-2d5d-4b70-84ac-43d6db536b7b-kube-api-access-c9h58\") pod \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") "
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.751297 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-config-data\") pod \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") "
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.751328 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-config-data-custom\") pod \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") "
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.751413 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-combined-ca-bundle\") pod \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") "
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.751434 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-scripts\") pod \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") "
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.751519 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/5aae111c-2d5d-4b70-84ac-43d6db536b7b-certs\") pod \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") "
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.751658 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5aae111c-2d5d-4b70-84ac-43d6db536b7b-logs\") pod \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\" (UID: \"5aae111c-2d5d-4b70-84ac-43d6db536b7b\") "
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.753911 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5aae111c-2d5d-4b70-84ac-43d6db536b7b-logs" (OuterVolumeSpecName: "logs") pod "5aae111c-2d5d-4b70-84ac-43d6db536b7b" (UID: "5aae111c-2d5d-4b70-84ac-43d6db536b7b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.835880 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "5aae111c-2d5d-4b70-84ac-43d6db536b7b" (UID: "5aae111c-2d5d-4b70-84ac-43d6db536b7b"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.840958 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-config-data" (OuterVolumeSpecName: "config-data") pod "5aae111c-2d5d-4b70-84ac-43d6db536b7b" (UID: "5aae111c-2d5d-4b70-84ac-43d6db536b7b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.844944 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5aae111c-2d5d-4b70-84ac-43d6db536b7b-kube-api-access-c9h58" (OuterVolumeSpecName: "kube-api-access-c9h58") pod "5aae111c-2d5d-4b70-84ac-43d6db536b7b" (UID: "5aae111c-2d5d-4b70-84ac-43d6db536b7b"). InnerVolumeSpecName "kube-api-access-c9h58". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.861884 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5aae111c-2d5d-4b70-84ac-43d6db536b7b-logs\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.861919 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9h58\" (UniqueName: \"kubernetes.io/projected/5aae111c-2d5d-4b70-84ac-43d6db536b7b-kube-api-access-c9h58\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.861930 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-config-data\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.861943 4755 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-config-data-custom\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.907530 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5aae111c-2d5d-4b70-84ac-43d6db536b7b-certs" (OuterVolumeSpecName: "certs") pod "5aae111c-2d5d-4b70-84ac-43d6db536b7b" (UID: "5aae111c-2d5d-4b70-84ac-43d6db536b7b"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.911094 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-scripts" (OuterVolumeSpecName: "scripts") pod "5aae111c-2d5d-4b70-84ac-43d6db536b7b" (UID: "5aae111c-2d5d-4b70-84ac-43d6db536b7b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.967071 4755 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/5aae111c-2d5d-4b70-84ac-43d6db536b7b-certs\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:46 crc kubenswrapper[4755]: I0202 22:54:46.967105 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-scripts\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.004351 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5aae111c-2d5d-4b70-84ac-43d6db536b7b" (UID: "5aae111c-2d5d-4b70-84ac-43d6db536b7b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.073164 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5aae111c-2d5d-4b70-84ac-43d6db536b7b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.293152 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-7cb7f594d6-76xlv"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.503311 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"5aae111c-2d5d-4b70-84ac-43d6db536b7b","Type":"ContainerDied","Data":"de6038a615fc5e26be96210463c9b5386be5f4ad6e4b43b1f7131d2a13ed13f6"}
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.503326 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.503361 4755 scope.go:117] "RemoveContainer" containerID="54fc337f101506cfc583389830e1bf72fd47ddde58f8f154122c7b72bbc6f642"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.512889 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-5bdd8df796-zxkxp" podUID="58da41fb-8aca-4566-a2c2-a13c57ee04ce" containerName="placement-log" containerID="cri-o://813c7a8f5d819b2260bf9193d578d9cdc91a59a58a58a36c061eddcaabd384b0" gracePeriod=30
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.513368 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-5bdd8df796-zxkxp" podUID="58da41fb-8aca-4566-a2c2-a13c57ee04ce" containerName="placement-api" containerID="cri-o://4573734855abe19a214fd5f7e283449e2f2057dc71c168302c84b4d45de85d8a" gracePeriod=30
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.514835 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"719a114e-7ced-4950-9bce-749ce1431af8","Type":"ContainerStarted","Data":"a46108c25b9438f582147987d0e43bdf0f1dec1e7d3a95a12e2a1c45a98e7b31"}
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.514910 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.529218 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"]
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.586558 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-api-0"]
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.594470 4755 scope.go:117] "RemoveContainer" containerID="92e096dbfb96eafb896d7651bad9bdfc0c1f065cf14fe1e60476aced074ecf8f"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.610656 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-api-0"]
Feb 02 22:54:47 crc kubenswrapper[4755]: E0202 22:54:47.611124 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6a7c105-7184-419e-a061-819d29a9a7c2" containerName="init"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.611138 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6a7c105-7184-419e-a061-819d29a9a7c2" containerName="init"
Feb 02 22:54:47 crc kubenswrapper[4755]: E0202 22:54:47.611157 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5aae111c-2d5d-4b70-84ac-43d6db536b7b" containerName="cloudkitty-api-log"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.611163 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="5aae111c-2d5d-4b70-84ac-43d6db536b7b" containerName="cloudkitty-api-log"
Feb 02 22:54:47 crc kubenswrapper[4755]: E0202 22:54:47.611176 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6a7c105-7184-419e-a061-819d29a9a7c2" containerName="dnsmasq-dns"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.611185 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6a7c105-7184-419e-a061-819d29a9a7c2" containerName="dnsmasq-dns"
Feb 02 22:54:47 crc kubenswrapper[4755]: E0202 22:54:47.611205 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5aae111c-2d5d-4b70-84ac-43d6db536b7b" containerName="cloudkitty-api"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.611212 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="5aae111c-2d5d-4b70-84ac-43d6db536b7b" containerName="cloudkitty-api"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.611384 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="5aae111c-2d5d-4b70-84ac-43d6db536b7b" containerName="cloudkitty-api-log"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.611397 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="5aae111c-2d5d-4b70-84ac-43d6db536b7b" containerName="cloudkitty-api"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.611410 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6a7c105-7184-419e-a061-819d29a9a7c2" containerName="dnsmasq-dns"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.612826 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.613123 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.720365958 podStartE2EDuration="8.613107823s" podCreationTimestamp="2026-02-02 22:54:39 +0000 UTC" firstStartedPulling="2026-02-02 22:54:40.066138588 +0000 UTC m=+1235.757358924" lastFinishedPulling="2026-02-02 22:54:45.958880473 +0000 UTC m=+1241.650100789" observedRunningTime="2026-02-02 22:54:47.552165403 +0000 UTC m=+1243.243385739" watchObservedRunningTime="2026-02-02 22:54:47.613107823 +0000 UTC m=+1243.304328149"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.615886 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-api-config-data"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.616076 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-public-svc"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.616187 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-internal-svc"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.662509 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"]
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.791011 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-scripts\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.791059 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/be9e900a-a2d8-4291-9817-fccd96e11436-certs\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.791088 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be9e900a-a2d8-4291-9817-fccd96e11436-logs\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.791143 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-config-data\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.791275 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.791321 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.791630 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.791738 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nz96t\" (UniqueName: \"kubernetes.io/projected/be9e900a-a2d8-4291-9817-fccd96e11436-kube-api-access-nz96t\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.791966 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.893806 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be9e900a-a2d8-4291-9817-fccd96e11436-logs\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.893876 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-config-data\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.893907 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.893927 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.894002 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.894037 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nz96t\" (UniqueName: \"kubernetes.io/projected/be9e900a-a2d8-4291-9817-fccd96e11436-kube-api-access-nz96t\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.894076 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.894106 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-scripts\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.894124 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/be9e900a-a2d8-4291-9817-fccd96e11436-certs\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.894249 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be9e900a-a2d8-4291-9817-fccd96e11436-logs\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.899707 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.900228 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.900363 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-scripts\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.901328 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-config-data\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.905267 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/be9e900a-a2d8-4291-9817-fccd96e11436-certs\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.912252 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.913102 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nz96t\" (UniqueName: \"kubernetes.io/projected/be9e900a-a2d8-4291-9817-fccd96e11436-kube-api-access-nz96t\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.913206 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:54:47 crc kubenswrapper[4755]: I0202 22:54:47.954334 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0"
Feb 02 22:54:48 crc kubenswrapper[4755]: I0202 22:54:48.459252 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"]
Feb 02 22:54:48 crc kubenswrapper[4755]: W0202 22:54:48.459587 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbe9e900a_a2d8_4291_9817_fccd96e11436.slice/crio-552a5a0aba00da5821318b857a434cd381350faea5ddba038ef29ff4173466f1 WatchSource:0}: Error finding container 552a5a0aba00da5821318b857a434cd381350faea5ddba038ef29ff4173466f1: Status 404 returned error can't find the container with id 552a5a0aba00da5821318b857a434cd381350faea5ddba038ef29ff4173466f1
Feb 02 22:54:48 crc kubenswrapper[4755]: I0202 22:54:48.538330 4755 generic.go:334] "Generic (PLEG): container finished" podID="58da41fb-8aca-4566-a2c2-a13c57ee04ce" containerID="813c7a8f5d819b2260bf9193d578d9cdc91a59a58a58a36c061eddcaabd384b0" exitCode=143
Feb 02 22:54:48 crc kubenswrapper[4755]: I0202 22:54:48.538417 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5bdd8df796-zxkxp" event={"ID":"58da41fb-8aca-4566-a2c2-a13c57ee04ce","Type":"ContainerDied","Data":"813c7a8f5d819b2260bf9193d578d9cdc91a59a58a58a36c061eddcaabd384b0"}
Feb 02 22:54:48 crc kubenswrapper[4755]: I0202 22:54:48.546646 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"be9e900a-a2d8-4291-9817-fccd96e11436","Type":"ContainerStarted","Data":"552a5a0aba00da5821318b857a434cd381350faea5ddba038ef29ff4173466f1"}
Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.087331 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5aae111c-2d5d-4b70-84ac-43d6db536b7b" path="/var/lib/kubelet/pods/5aae111c-2d5d-4b70-84ac-43d6db536b7b/volumes"
Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.152348 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
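The reconciler entries above walk each of cloudkitty-api-0's volumes through the same three mount-side phases: VerifyControllerAttachedVolume started, MountVolume started, then MountVolume.SetUp succeeded. One quick sanity check on a capture like this is to tally those phases per volume name and confirm every volume reached SetUp. A small scanning sketch (a hypothetical helper reading a kubelet journal on stdin, not part of kubelet itself):

    package main

    import (
    	"bufio"
    	"fmt"
    	"os"
    	"regexp"
    )

    func main() {
    	// Volume names appear in the log as escaped quotes, e.g. \"config-data\",
    	// so the patterns below match a literal backslash before each quote.
    	started := regexp.MustCompile(`(VerifyControllerAttachedVolume|MountVolume|UnmountVolume) started for volume \\"([^"\\]+)\\"`)
    	setup := regexp.MustCompile(`MountVolume\.SetUp succeeded for volume \\"([^"\\]+)\\"`)

    	counts := map[string]map[string]int{}
    	bump := func(vol, phase string) {
    		if counts[vol] == nil {
    			counts[vol] = map[string]int{}
    		}
    		counts[vol][phase]++
    	}

    	sc := bufio.NewScanner(os.Stdin)
    	sc.Buffer(make([]byte, 1<<20), 1<<20) // journal lines can be very long
    	for sc.Scan() {
    		line := sc.Text()
    		if m := started.FindStringSubmatch(line); m != nil {
    			bump(m[2], m[1]+" started")
    		}
    		if m := setup.FindStringSubmatch(line); m != nil {
    			bump(m[1], "SetUp succeeded")
    		}
    	}
    	for vol, phases := range counts {
    		fmt.Println(vol, phases) // a volume missing "SetUp succeeded" stands out
    	}
    }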
Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.327881 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7eb8965b-8c6b-410a-9402-03e460f3dffc-etc-machine-id\") pod \"7eb8965b-8c6b-410a-9402-03e460f3dffc\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") " Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.327977 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-scripts\") pod \"7eb8965b-8c6b-410a-9402-03e460f3dffc\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") " Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.327993 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7eb8965b-8c6b-410a-9402-03e460f3dffc-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "7eb8965b-8c6b-410a-9402-03e460f3dffc" (UID: "7eb8965b-8c6b-410a-9402-03e460f3dffc"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.328004 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-config-data-custom\") pod \"7eb8965b-8c6b-410a-9402-03e460f3dffc\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") " Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.328051 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-config-data\") pod \"7eb8965b-8c6b-410a-9402-03e460f3dffc\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") " Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.328145 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-combined-ca-bundle\") pod \"7eb8965b-8c6b-410a-9402-03e460f3dffc\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") " Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.328263 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qttfb\" (UniqueName: \"kubernetes.io/projected/7eb8965b-8c6b-410a-9402-03e460f3dffc-kube-api-access-qttfb\") pod \"7eb8965b-8c6b-410a-9402-03e460f3dffc\" (UID: \"7eb8965b-8c6b-410a-9402-03e460f3dffc\") " Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.328682 4755 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7eb8965b-8c6b-410a-9402-03e460f3dffc-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.335737 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-scripts" (OuterVolumeSpecName: "scripts") pod "7eb8965b-8c6b-410a-9402-03e460f3dffc" (UID: "7eb8965b-8c6b-410a-9402-03e460f3dffc"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.336923 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7eb8965b-8c6b-410a-9402-03e460f3dffc-kube-api-access-qttfb" (OuterVolumeSpecName: "kube-api-access-qttfb") pod "7eb8965b-8c6b-410a-9402-03e460f3dffc" (UID: "7eb8965b-8c6b-410a-9402-03e460f3dffc"). InnerVolumeSpecName "kube-api-access-qttfb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.337021 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7eb8965b-8c6b-410a-9402-03e460f3dffc" (UID: "7eb8965b-8c6b-410a-9402-03e460f3dffc"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.386591 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7eb8965b-8c6b-410a-9402-03e460f3dffc" (UID: "7eb8965b-8c6b-410a-9402-03e460f3dffc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.430480 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.430669 4755 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.430766 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.430832 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qttfb\" (UniqueName: \"kubernetes.io/projected/7eb8965b-8c6b-410a-9402-03e460f3dffc-kube-api-access-qttfb\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.430765 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-config-data" (OuterVolumeSpecName: "config-data") pod "7eb8965b-8c6b-410a-9402-03e460f3dffc" (UID: "7eb8965b-8c6b-410a-9402-03e460f3dffc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.532189 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eb8965b-8c6b-410a-9402-03e460f3dffc-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.558765 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"be9e900a-a2d8-4291-9817-fccd96e11436","Type":"ContainerStarted","Data":"557002d3e000ebf1df46e9340bc73a32fc977d2dc60819b8c346d0ed0d4b7386"} Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.559758 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"be9e900a-a2d8-4291-9817-fccd96e11436","Type":"ContainerStarted","Data":"3df824c680bb5f51ba1203758fdd4f8a065590dd78171682d79b9c0f10711741"} Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.559865 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-api-0" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.580695 4755 generic.go:334] "Generic (PLEG): container finished" podID="7eb8965b-8c6b-410a-9402-03e460f3dffc" containerID="ada66a31e9d9da44388b0ed882af5efae575c907f483202c0e54231f247f4d9b" exitCode=0 Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.580760 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7eb8965b-8c6b-410a-9402-03e460f3dffc","Type":"ContainerDied","Data":"ada66a31e9d9da44388b0ed882af5efae575c907f483202c0e54231f247f4d9b"} Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.580769 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.580786 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7eb8965b-8c6b-410a-9402-03e460f3dffc","Type":"ContainerDied","Data":"2d502db7a7699956caf59f4dc7156fbe8eb325ba110decc1d70de6bf7855d98d"} Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.580803 4755 scope.go:117] "RemoveContainer" containerID="763952e928581b0aac101da50d23e86e5d2ffc3af29bdd9acaf803b7ca65c369" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.605981 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-api-0" podStartSLOduration=2.605963654 podStartE2EDuration="2.605963654s" podCreationTimestamp="2026-02-02 22:54:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:49.591091417 +0000 UTC m=+1245.282311753" watchObservedRunningTime="2026-02-02 22:54:49.605963654 +0000 UTC m=+1245.297183980" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.662959 4755 scope.go:117] "RemoveContainer" containerID="ada66a31e9d9da44388b0ed882af5efae575c907f483202c0e54231f247f4d9b" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.685990 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.719830 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.731128 4755 scope.go:117] "RemoveContainer" containerID="763952e928581b0aac101da50d23e86e5d2ffc3af29bdd9acaf803b7ca65c369" Feb 02 22:54:49 crc 
kubenswrapper[4755]: E0202 22:54:49.732620 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"763952e928581b0aac101da50d23e86e5d2ffc3af29bdd9acaf803b7ca65c369\": container with ID starting with 763952e928581b0aac101da50d23e86e5d2ffc3af29bdd9acaf803b7ca65c369 not found: ID does not exist" containerID="763952e928581b0aac101da50d23e86e5d2ffc3af29bdd9acaf803b7ca65c369" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.732668 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"763952e928581b0aac101da50d23e86e5d2ffc3af29bdd9acaf803b7ca65c369"} err="failed to get container status \"763952e928581b0aac101da50d23e86e5d2ffc3af29bdd9acaf803b7ca65c369\": rpc error: code = NotFound desc = could not find container \"763952e928581b0aac101da50d23e86e5d2ffc3af29bdd9acaf803b7ca65c369\": container with ID starting with 763952e928581b0aac101da50d23e86e5d2ffc3af29bdd9acaf803b7ca65c369 not found: ID does not exist" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.732690 4755 scope.go:117] "RemoveContainer" containerID="ada66a31e9d9da44388b0ed882af5efae575c907f483202c0e54231f247f4d9b" Feb 02 22:54:49 crc kubenswrapper[4755]: E0202 22:54:49.736024 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ada66a31e9d9da44388b0ed882af5efae575c907f483202c0e54231f247f4d9b\": container with ID starting with ada66a31e9d9da44388b0ed882af5efae575c907f483202c0e54231f247f4d9b not found: ID does not exist" containerID="ada66a31e9d9da44388b0ed882af5efae575c907f483202c0e54231f247f4d9b" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.736060 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ada66a31e9d9da44388b0ed882af5efae575c907f483202c0e54231f247f4d9b"} err="failed to get container status \"ada66a31e9d9da44388b0ed882af5efae575c907f483202c0e54231f247f4d9b\": rpc error: code = NotFound desc = could not find container \"ada66a31e9d9da44388b0ed882af5efae575c907f483202c0e54231f247f4d9b\": container with ID starting with ada66a31e9d9da44388b0ed882af5efae575c907f483202c0e54231f247f4d9b not found: ID does not exist" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.756688 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 22:54:49 crc kubenswrapper[4755]: E0202 22:54:49.757637 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7eb8965b-8c6b-410a-9402-03e460f3dffc" containerName="probe" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.757658 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="7eb8965b-8c6b-410a-9402-03e460f3dffc" containerName="probe" Feb 02 22:54:49 crc kubenswrapper[4755]: E0202 22:54:49.757691 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7eb8965b-8c6b-410a-9402-03e460f3dffc" containerName="cinder-scheduler" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.757698 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="7eb8965b-8c6b-410a-9402-03e460f3dffc" containerName="cinder-scheduler" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.762655 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="7eb8965b-8c6b-410a-9402-03e460f3dffc" containerName="cinder-scheduler" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.762859 4755 memory_manager.go:354] "RemoveStaleState removing state" 
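The RemoveContainer / NotFound pairs above look like a benign race: by the time the second delete attempt runs, the runtime has already removed containers 763952e9... and ada66a31..., so ContainerStatus returns gRPC NotFound and the kubelet simply records the error and moves on. A sketch of that tolerant-delete pattern (illustrative only, assuming a CRI-style client that surfaces gRPC status errors; rm below is a stand-in, not a real API):

    package main

    import (
    	"fmt"

    	"google.golang.org/grpc/codes"
    	"google.golang.org/grpc/status"
    )

    // removeIfAbsentOK attempts a container removal but treats gRPC NotFound
    // as success: if the container is already gone, the goal is achieved.
    func removeIfAbsentOK(rm func(id string) error, id string) error {
    	err := rm(id)
    	if status.Code(err) == codes.NotFound {
    		return nil // already removed: someone else won the cleanup race
    	}
    	return err
    }

    func main() {
    	// Stand-in for a CRI RemoveContainer call that races a prior cleanup.
    	rm := func(id string) error {
    		return status.Error(codes.NotFound, "could not find container "+id)
    	}
    	fmt.Println(removeIfAbsentOK(rm, "763952e9...")) // prints <nil>
    }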
podUID="7eb8965b-8c6b-410a-9402-03e460f3dffc" containerName="probe" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.764219 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.771164 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.771522 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.943008 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/35c710b6-d258-4242-99f7-c1f3216cfc0c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"35c710b6-d258-4242-99f7-c1f3216cfc0c\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.943134 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35c710b6-d258-4242-99f7-c1f3216cfc0c-config-data\") pod \"cinder-scheduler-0\" (UID: \"35c710b6-d258-4242-99f7-c1f3216cfc0c\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.943248 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7nmj\" (UniqueName: \"kubernetes.io/projected/35c710b6-d258-4242-99f7-c1f3216cfc0c-kube-api-access-h7nmj\") pod \"cinder-scheduler-0\" (UID: \"35c710b6-d258-4242-99f7-c1f3216cfc0c\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.943356 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35c710b6-d258-4242-99f7-c1f3216cfc0c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"35c710b6-d258-4242-99f7-c1f3216cfc0c\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.943426 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/35c710b6-d258-4242-99f7-c1f3216cfc0c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"35c710b6-d258-4242-99f7-c1f3216cfc0c\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:49 crc kubenswrapper[4755]: I0202 22:54:49.943567 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35c710b6-d258-4242-99f7-c1f3216cfc0c-scripts\") pod \"cinder-scheduler-0\" (UID: \"35c710b6-d258-4242-99f7-c1f3216cfc0c\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.045230 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/35c710b6-d258-4242-99f7-c1f3216cfc0c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"35c710b6-d258-4242-99f7-c1f3216cfc0c\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.045270 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35c710b6-d258-4242-99f7-c1f3216cfc0c-config-data\") pod \"cinder-scheduler-0\" (UID: 
\"35c710b6-d258-4242-99f7-c1f3216cfc0c\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.045312 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7nmj\" (UniqueName: \"kubernetes.io/projected/35c710b6-d258-4242-99f7-c1f3216cfc0c-kube-api-access-h7nmj\") pod \"cinder-scheduler-0\" (UID: \"35c710b6-d258-4242-99f7-c1f3216cfc0c\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.045367 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35c710b6-d258-4242-99f7-c1f3216cfc0c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"35c710b6-d258-4242-99f7-c1f3216cfc0c\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.045370 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/35c710b6-d258-4242-99f7-c1f3216cfc0c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"35c710b6-d258-4242-99f7-c1f3216cfc0c\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.045396 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/35c710b6-d258-4242-99f7-c1f3216cfc0c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"35c710b6-d258-4242-99f7-c1f3216cfc0c\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.045604 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35c710b6-d258-4242-99f7-c1f3216cfc0c-scripts\") pod \"cinder-scheduler-0\" (UID: \"35c710b6-d258-4242-99f7-c1f3216cfc0c\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.050996 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35c710b6-d258-4242-99f7-c1f3216cfc0c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"35c710b6-d258-4242-99f7-c1f3216cfc0c\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.064368 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35c710b6-d258-4242-99f7-c1f3216cfc0c-scripts\") pod \"cinder-scheduler-0\" (UID: \"35c710b6-d258-4242-99f7-c1f3216cfc0c\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.064644 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35c710b6-d258-4242-99f7-c1f3216cfc0c-config-data\") pod \"cinder-scheduler-0\" (UID: \"35c710b6-d258-4242-99f7-c1f3216cfc0c\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.064832 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/35c710b6-d258-4242-99f7-c1f3216cfc0c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"35c710b6-d258-4242-99f7-c1f3216cfc0c\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.066970 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7nmj\" (UniqueName: 
\"kubernetes.io/projected/35c710b6-d258-4242-99f7-c1f3216cfc0c-kube-api-access-h7nmj\") pod \"cinder-scheduler-0\" (UID: \"35c710b6-d258-4242-99f7-c1f3216cfc0c\") " pod="openstack/cinder-scheduler-0" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.098550 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.551839 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 02 22:54:50 crc kubenswrapper[4755]: W0202 22:54:50.563655 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35c710b6_d258_4242_99f7_c1f3216cfc0c.slice/crio-e131d73a0978abbd97672e35333bf32ebc88b3d420bbba88f1db41b5f763589f WatchSource:0}: Error finding container e131d73a0978abbd97672e35333bf32ebc88b3d420bbba88f1db41b5f763589f: Status 404 returned error can't find the container with id e131d73a0978abbd97672e35333bf32ebc88b3d420bbba88f1db41b5f763589f Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.599153 4755 generic.go:334] "Generic (PLEG): container finished" podID="184566af-1588-4a8c-9b65-3de6f1bda382" containerID="8beb1e3e9d2f795529669d47d67273dae5dec918e82e668fb22c791522a68150" exitCode=0 Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.599422 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"184566af-1588-4a8c-9b65-3de6f1bda382","Type":"ContainerDied","Data":"8beb1e3e9d2f795529669d47d67273dae5dec918e82e668fb22c791522a68150"} Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.607676 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"35c710b6-d258-4242-99f7-c1f3216cfc0c","Type":"ContainerStarted","Data":"e131d73a0978abbd97672e35333bf32ebc88b3d420bbba88f1db41b5f763589f"} Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.748865 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-proc-0" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.859445 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-config-data\") pod \"184566af-1588-4a8c-9b65-3de6f1bda382\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.859597 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-combined-ca-bundle\") pod \"184566af-1588-4a8c-9b65-3de6f1bda382\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.859633 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-config-data-custom\") pod \"184566af-1588-4a8c-9b65-3de6f1bda382\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.859704 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pqpd7\" (UniqueName: \"kubernetes.io/projected/184566af-1588-4a8c-9b65-3de6f1bda382-kube-api-access-pqpd7\") pod \"184566af-1588-4a8c-9b65-3de6f1bda382\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.859819 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-scripts\") pod \"184566af-1588-4a8c-9b65-3de6f1bda382\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.859882 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/184566af-1588-4a8c-9b65-3de6f1bda382-certs\") pod \"184566af-1588-4a8c-9b65-3de6f1bda382\" (UID: \"184566af-1588-4a8c-9b65-3de6f1bda382\") " Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.865500 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/184566af-1588-4a8c-9b65-3de6f1bda382-kube-api-access-pqpd7" (OuterVolumeSpecName: "kube-api-access-pqpd7") pod "184566af-1588-4a8c-9b65-3de6f1bda382" (UID: "184566af-1588-4a8c-9b65-3de6f1bda382"). InnerVolumeSpecName "kube-api-access-pqpd7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.874437 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-scripts" (OuterVolumeSpecName: "scripts") pod "184566af-1588-4a8c-9b65-3de6f1bda382" (UID: "184566af-1588-4a8c-9b65-3de6f1bda382"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.874475 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "184566af-1588-4a8c-9b65-3de6f1bda382" (UID: "184566af-1588-4a8c-9b65-3de6f1bda382"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.874942 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/184566af-1588-4a8c-9b65-3de6f1bda382-certs" (OuterVolumeSpecName: "certs") pod "184566af-1588-4a8c-9b65-3de6f1bda382" (UID: "184566af-1588-4a8c-9b65-3de6f1bda382"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.895827 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-config-data" (OuterVolumeSpecName: "config-data") pod "184566af-1588-4a8c-9b65-3de6f1bda382" (UID: "184566af-1588-4a8c-9b65-3de6f1bda382"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.916295 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "184566af-1588-4a8c-9b65-3de6f1bda382" (UID: "184566af-1588-4a8c-9b65-3de6f1bda382"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.917613 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.946854 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-58bd69657f-l4lch" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.962881 4755 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/184566af-1588-4a8c-9b65-3de6f1bda382-certs\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.962912 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.962922 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.962934 4755 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.962943 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pqpd7\" (UniqueName: \"kubernetes.io/projected/184566af-1588-4a8c-9b65-3de6f1bda382-kube-api-access-pqpd7\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:50 crc kubenswrapper[4755]: I0202 22:54:50.962952 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/184566af-1588-4a8c-9b65-3de6f1bda382-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.006253 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-hcqk6"] Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.006782 4755 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" podUID="f7cdcbab-c1c0-4984-bf8d-781715d75dc1" containerName="dnsmasq-dns" containerID="cri-o://d1d977b08beff48ba401e6a4010ff3ce99c2011b6dcea05e3f24572fd12dfb4f" gracePeriod=10 Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.095946 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7eb8965b-8c6b-410a-9402-03e460f3dffc" path="/var/lib/kubelet/pods/7eb8965b-8c6b-410a-9402-03e460f3dffc/volumes" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.409809 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Feb 02 22:54:51 crc kubenswrapper[4755]: E0202 22:54:51.410322 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="184566af-1588-4a8c-9b65-3de6f1bda382" containerName="cloudkitty-proc" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.410348 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="184566af-1588-4a8c-9b65-3de6f1bda382" containerName="cloudkitty-proc" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.410602 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="184566af-1588-4a8c-9b65-3de6f1bda382" containerName="cloudkitty-proc" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.411449 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.418019 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.418563 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.418685 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-6jnbt" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.424601 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.561088 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.611085 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7sph\" (UniqueName: \"kubernetes.io/projected/e160c11e-8b86-4837-9a86-bd2eb97f94d9-kube-api-access-c7sph\") pod \"openstackclient\" (UID: \"e160c11e-8b86-4837-9a86-bd2eb97f94d9\") " pod="openstack/openstackclient" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.611342 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e160c11e-8b86-4837-9a86-bd2eb97f94d9-openstack-config\") pod \"openstackclient\" (UID: \"e160c11e-8b86-4837-9a86-bd2eb97f94d9\") " pod="openstack/openstackclient" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.611371 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e160c11e-8b86-4837-9a86-bd2eb97f94d9-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e160c11e-8b86-4837-9a86-bd2eb97f94d9\") " pod="openstack/openstackclient" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.611404 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e160c11e-8b86-4837-9a86-bd2eb97f94d9-openstack-config-secret\") pod \"openstackclient\" (UID: \"e160c11e-8b86-4837-9a86-bd2eb97f94d9\") " pod="openstack/openstackclient" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.630874 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.631136 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"184566af-1588-4a8c-9b65-3de6f1bda382","Type":"ContainerDied","Data":"9c4509fca89c56ea87bf9af940e48e4796a91fe267bcdef23ef6de4b640bfb1d"} Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.631191 4755 scope.go:117] "RemoveContainer" containerID="8beb1e3e9d2f795529669d47d67273dae5dec918e82e668fb22c791522a68150" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.637212 4755 generic.go:334] "Generic (PLEG): container finished" podID="58da41fb-8aca-4566-a2c2-a13c57ee04ce" containerID="4573734855abe19a214fd5f7e283449e2f2057dc71c168302c84b4d45de85d8a" exitCode=0 Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.637276 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5bdd8df796-zxkxp" event={"ID":"58da41fb-8aca-4566-a2c2-a13c57ee04ce","Type":"ContainerDied","Data":"4573734855abe19a214fd5f7e283449e2f2057dc71c168302c84b4d45de85d8a"} Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.637303 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5bdd8df796-zxkxp" event={"ID":"58da41fb-8aca-4566-a2c2-a13c57ee04ce","Type":"ContainerDied","Data":"507913ec81cca7d331ec162bf1b44e8cfc7d5cb057465b706cebf39f7a4af20b"} Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.637363 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-5bdd8df796-zxkxp" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.644222 4755 generic.go:334] "Generic (PLEG): container finished" podID="f7cdcbab-c1c0-4984-bf8d-781715d75dc1" containerID="d1d977b08beff48ba401e6a4010ff3ce99c2011b6dcea05e3f24572fd12dfb4f" exitCode=0 Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.644811 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" event={"ID":"f7cdcbab-c1c0-4984-bf8d-781715d75dc1","Type":"ContainerDied","Data":"d1d977b08beff48ba401e6a4010ff3ce99c2011b6dcea05e3f24572fd12dfb4f"} Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.669410 4755 scope.go:117] "RemoveContainer" containerID="4573734855abe19a214fd5f7e283449e2f2057dc71c168302c84b4d45de85d8a" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.672983 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.705603 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-proc-0"] Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.714374 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-public-tls-certs\") pod \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.714428 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-config-data\") pod \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.714479 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58da41fb-8aca-4566-a2c2-a13c57ee04ce-logs\") pod \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.714504 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-scripts\") pod \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.715084 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-internal-tls-certs\") pod \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.715146 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqnqs\" (UniqueName: \"kubernetes.io/projected/58da41fb-8aca-4566-a2c2-a13c57ee04ce-kube-api-access-cqnqs\") pod \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\" (UID: \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.715178 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-combined-ca-bundle\") pod \"58da41fb-8aca-4566-a2c2-a13c57ee04ce\" (UID: 
\"58da41fb-8aca-4566-a2c2-a13c57ee04ce\") " Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.715370 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7sph\" (UniqueName: \"kubernetes.io/projected/e160c11e-8b86-4837-9a86-bd2eb97f94d9-kube-api-access-c7sph\") pod \"openstackclient\" (UID: \"e160c11e-8b86-4837-9a86-bd2eb97f94d9\") " pod="openstack/openstackclient" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.715397 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58da41fb-8aca-4566-a2c2-a13c57ee04ce-logs" (OuterVolumeSpecName: "logs") pod "58da41fb-8aca-4566-a2c2-a13c57ee04ce" (UID: "58da41fb-8aca-4566-a2c2-a13c57ee04ce"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.715428 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e160c11e-8b86-4837-9a86-bd2eb97f94d9-openstack-config\") pod \"openstackclient\" (UID: \"e160c11e-8b86-4837-9a86-bd2eb97f94d9\") " pod="openstack/openstackclient" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.715546 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e160c11e-8b86-4837-9a86-bd2eb97f94d9-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e160c11e-8b86-4837-9a86-bd2eb97f94d9\") " pod="openstack/openstackclient" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.715631 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e160c11e-8b86-4837-9a86-bd2eb97f94d9-openstack-config-secret\") pod \"openstackclient\" (UID: \"e160c11e-8b86-4837-9a86-bd2eb97f94d9\") " pod="openstack/openstackclient" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.715969 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58da41fb-8aca-4566-a2c2-a13c57ee04ce-logs\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.716132 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e160c11e-8b86-4837-9a86-bd2eb97f94d9-openstack-config\") pod \"openstackclient\" (UID: \"e160c11e-8b86-4837-9a86-bd2eb97f94d9\") " pod="openstack/openstackclient" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.729668 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-proc-0"] Feb 02 22:54:51 crc kubenswrapper[4755]: E0202 22:54:51.730239 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58da41fb-8aca-4566-a2c2-a13c57ee04ce" containerName="placement-api" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.731898 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="58da41fb-8aca-4566-a2c2-a13c57ee04ce" containerName="placement-api" Feb 02 22:54:51 crc kubenswrapper[4755]: E0202 22:54:51.731970 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58da41fb-8aca-4566-a2c2-a13c57ee04ce" containerName="placement-log" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.732026 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="58da41fb-8aca-4566-a2c2-a13c57ee04ce" containerName="placement-log" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.732287 
4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="58da41fb-8aca-4566-a2c2-a13c57ee04ce" containerName="placement-log" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.732388 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="58da41fb-8aca-4566-a2c2-a13c57ee04ce" containerName="placement-api" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.733503 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.740642 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-proc-config-data" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.741980 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58da41fb-8aca-4566-a2c2-a13c57ee04ce-kube-api-access-cqnqs" (OuterVolumeSpecName: "kube-api-access-cqnqs") pod "58da41fb-8aca-4566-a2c2-a13c57ee04ce" (UID: "58da41fb-8aca-4566-a2c2-a13c57ee04ce"). InnerVolumeSpecName "kube-api-access-cqnqs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.748279 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e160c11e-8b86-4837-9a86-bd2eb97f94d9-openstack-config-secret\") pod \"openstackclient\" (UID: \"e160c11e-8b86-4837-9a86-bd2eb97f94d9\") " pod="openstack/openstackclient" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.748835 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7sph\" (UniqueName: \"kubernetes.io/projected/e160c11e-8b86-4837-9a86-bd2eb97f94d9-kube-api-access-c7sph\") pod \"openstackclient\" (UID: \"e160c11e-8b86-4837-9a86-bd2eb97f94d9\") " pod="openstack/openstackclient" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.749043 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-scripts" (OuterVolumeSpecName: "scripts") pod "58da41fb-8aca-4566-a2c2-a13c57ee04ce" (UID: "58da41fb-8aca-4566-a2c2-a13c57ee04ce"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.764104 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e160c11e-8b86-4837-9a86-bd2eb97f94d9-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e160c11e-8b86-4837-9a86-bd2eb97f94d9\") " pod="openstack/openstackclient" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.811607 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.819373 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-scripts\") pod \"cloudkitty-proc-0\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " pod="openstack/cloudkitty-proc-0" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.825062 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " pod="openstack/cloudkitty-proc-0" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.825238 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-config-data\") pod \"cloudkitty-proc-0\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " pod="openstack/cloudkitty-proc-0" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.825373 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfnfj\" (UniqueName: \"kubernetes.io/projected/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-kube-api-access-bfnfj\") pod \"cloudkitty-proc-0\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " pod="openstack/cloudkitty-proc-0" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.825508 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " pod="openstack/cloudkitty-proc-0" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.825795 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-certs\") pod \"cloudkitty-proc-0\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " pod="openstack/cloudkitty-proc-0" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.827920 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.828062 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqnqs\" (UniqueName: \"kubernetes.io/projected/58da41fb-8aca-4566-a2c2-a13c57ee04ce-kube-api-access-cqnqs\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.834472 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-config-data" (OuterVolumeSpecName: "config-data") pod "58da41fb-8aca-4566-a2c2-a13c57ee04ce" (UID: "58da41fb-8aca-4566-a2c2-a13c57ee04ce"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.893376 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "58da41fb-8aca-4566-a2c2-a13c57ee04ce" (UID: "58da41fb-8aca-4566-a2c2-a13c57ee04ce"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.905894 4755 scope.go:117] "RemoveContainer" containerID="813c7a8f5d819b2260bf9193d578d9cdc91a59a58a58a36c061eddcaabd384b0" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.915989 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "58da41fb-8aca-4566-a2c2-a13c57ee04ce" (UID: "58da41fb-8aca-4566-a2c2-a13c57ee04ce"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.923487 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.929236 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " pod="openstack/cloudkitty-proc-0" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.929294 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-config-data\") pod \"cloudkitty-proc-0\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " pod="openstack/cloudkitty-proc-0" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.929331 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfnfj\" (UniqueName: \"kubernetes.io/projected/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-kube-api-access-bfnfj\") pod \"cloudkitty-proc-0\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " pod="openstack/cloudkitty-proc-0" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.929365 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " pod="openstack/cloudkitty-proc-0" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.929408 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-certs\") pod \"cloudkitty-proc-0\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " pod="openstack/cloudkitty-proc-0" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.929474 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-scripts\") pod \"cloudkitty-proc-0\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " pod="openstack/cloudkitty-proc-0" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.929519 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.929530 4755 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.929540 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.944582 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-scripts\") pod \"cloudkitty-proc-0\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " pod="openstack/cloudkitty-proc-0" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.945134 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-config-data\") pod \"cloudkitty-proc-0\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " pod="openstack/cloudkitty-proc-0" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.946113 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-certs\") pod \"cloudkitty-proc-0\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " pod="openstack/cloudkitty-proc-0" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.948987 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " pod="openstack/cloudkitty-proc-0" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.951652 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " pod="openstack/cloudkitty-proc-0" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.966268 4755 scope.go:117] "RemoveContainer" containerID="4573734855abe19a214fd5f7e283449e2f2057dc71c168302c84b4d45de85d8a" Feb 02 22:54:51 crc kubenswrapper[4755]: E0202 22:54:51.971460 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4573734855abe19a214fd5f7e283449e2f2057dc71c168302c84b4d45de85d8a\": container with ID starting with 4573734855abe19a214fd5f7e283449e2f2057dc71c168302c84b4d45de85d8a not found: ID does not exist" containerID="4573734855abe19a214fd5f7e283449e2f2057dc71c168302c84b4d45de85d8a" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.971515 4755 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"4573734855abe19a214fd5f7e283449e2f2057dc71c168302c84b4d45de85d8a"} err="failed to get container status \"4573734855abe19a214fd5f7e283449e2f2057dc71c168302c84b4d45de85d8a\": rpc error: code = NotFound desc = could not find container \"4573734855abe19a214fd5f7e283449e2f2057dc71c168302c84b4d45de85d8a\": container with ID starting with 4573734855abe19a214fd5f7e283449e2f2057dc71c168302c84b4d45de85d8a not found: ID does not exist" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.971544 4755 scope.go:117] "RemoveContainer" containerID="813c7a8f5d819b2260bf9193d578d9cdc91a59a58a58a36c061eddcaabd384b0" Feb 02 22:54:51 crc kubenswrapper[4755]: E0202 22:54:51.973094 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"813c7a8f5d819b2260bf9193d578d9cdc91a59a58a58a36c061eddcaabd384b0\": container with ID starting with 813c7a8f5d819b2260bf9193d578d9cdc91a59a58a58a36c061eddcaabd384b0 not found: ID does not exist" containerID="813c7a8f5d819b2260bf9193d578d9cdc91a59a58a58a36c061eddcaabd384b0" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.973144 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"813c7a8f5d819b2260bf9193d578d9cdc91a59a58a58a36c061eddcaabd384b0"} err="failed to get container status \"813c7a8f5d819b2260bf9193d578d9cdc91a59a58a58a36c061eddcaabd384b0\": rpc error: code = NotFound desc = could not find container \"813c7a8f5d819b2260bf9193d578d9cdc91a59a58a58a36c061eddcaabd384b0\": container with ID starting with 813c7a8f5d819b2260bf9193d578d9cdc91a59a58a58a36c061eddcaabd384b0 not found: ID does not exist" Feb 02 22:54:51 crc kubenswrapper[4755]: I0202 22:54:51.985861 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfnfj\" (UniqueName: \"kubernetes.io/projected/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-kube-api-access-bfnfj\") pod \"cloudkitty-proc-0\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " pod="openstack/cloudkitty-proc-0" Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.016489 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "58da41fb-8aca-4566-a2c2-a13c57ee04ce" (UID: "58da41fb-8aca-4566-a2c2-a13c57ee04ce"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.036934 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-dns-swift-storage-0\") pod \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.037086 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-ovsdbserver-nb\") pod \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.037155 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-config\") pod \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.037205 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-ovsdbserver-sb\") pod \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.037262 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dw8lv\" (UniqueName: \"kubernetes.io/projected/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-kube-api-access-dw8lv\") pod \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.037333 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-dns-svc\") pod \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\" (UID: \"f7cdcbab-c1c0-4984-bf8d-781715d75dc1\") " Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.038444 4755 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/58da41fb-8aca-4566-a2c2-a13c57ee04ce-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.061560 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.062211 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-kube-api-access-dw8lv" (OuterVolumeSpecName: "kube-api-access-dw8lv") pod "f7cdcbab-c1c0-4984-bf8d-781715d75dc1" (UID: "f7cdcbab-c1c0-4984-bf8d-781715d75dc1"). InnerVolumeSpecName "kube-api-access-dw8lv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.137703 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f7cdcbab-c1c0-4984-bf8d-781715d75dc1" (UID: "f7cdcbab-c1c0-4984-bf8d-781715d75dc1"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.141098 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dw8lv\" (UniqueName: \"kubernetes.io/projected/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-kube-api-access-dw8lv\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.141126 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.166607 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-config" (OuterVolumeSpecName: "config") pod "f7cdcbab-c1c0-4984-bf8d-781715d75dc1" (UID: "f7cdcbab-c1c0-4984-bf8d-781715d75dc1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.190138 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f7cdcbab-c1c0-4984-bf8d-781715d75dc1" (UID: "f7cdcbab-c1c0-4984-bf8d-781715d75dc1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.193646 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f7cdcbab-c1c0-4984-bf8d-781715d75dc1" (UID: "f7cdcbab-c1c0-4984-bf8d-781715d75dc1"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.197207 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f7cdcbab-c1c0-4984-bf8d-781715d75dc1" (UID: "f7cdcbab-c1c0-4984-bf8d-781715d75dc1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.243511 4755 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.243545 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.243560 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.243572 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f7cdcbab-c1c0-4984-bf8d-781715d75dc1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.247191 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-proc-0" Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.286244 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-5bdd8df796-zxkxp"] Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.302121 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-5bdd8df796-zxkxp"] Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.670299 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"35c710b6-d258-4242-99f7-c1f3216cfc0c","Type":"ContainerStarted","Data":"7cc8e8f4d290b6c70c237a1b1e1b97dbedd1e6a87ec6cfed5400c2d8127f7f11"} Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.670619 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"35c710b6-d258-4242-99f7-c1f3216cfc0c","Type":"ContainerStarted","Data":"74b22fd5cbe3e9157785ef3900036167b0441334ef29fe7a1c6310ba1ddfc58d"} Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.681511 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Feb 02 22:54:52 crc kubenswrapper[4755]: W0202 22:54:52.699031 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode160c11e_8b86_4837_9a86_bd2eb97f94d9.slice/crio-585ae6258f290e8c66ebf43cd470a7bcba9956fc1016c58e85afb503547ba30b WatchSource:0}: Error finding container 585ae6258f290e8c66ebf43cd470a7bcba9956fc1016c58e85afb503547ba30b: Status 404 returned error can't find the container with id 585ae6258f290e8c66ebf43cd470a7bcba9956fc1016c58e85afb503547ba30b Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.700942 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" event={"ID":"f7cdcbab-c1c0-4984-bf8d-781715d75dc1","Type":"ContainerDied","Data":"62798e56284ae033d247acc9aadbf85d6b9998aab71f92fdf9873bafbef2dd43"} Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.700984 4755 scope.go:117] "RemoveContainer" containerID="d1d977b08beff48ba401e6a4010ff3ce99c2011b6dcea05e3f24572fd12dfb4f" Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.701087 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-hcqk6" Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.705560 4755 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.710484 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.7104634499999998 podStartE2EDuration="3.71046345s" podCreationTimestamp="2026-02-02 22:54:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:52.699580384 +0000 UTC m=+1248.390800710" watchObservedRunningTime="2026-02-02 22:54:52.71046345 +0000 UTC m=+1248.401683776" Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.755653 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-hcqk6"] Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.757046 4755 scope.go:117] "RemoveContainer" containerID="602424169d3a36beccc90897c6afb5af8dbbbfd33ea287063a96a80fbfd41c7b" Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.767011 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-hcqk6"] Feb 02 22:54:52 crc kubenswrapper[4755]: I0202 22:54:52.849692 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Feb 02 22:54:53 crc kubenswrapper[4755]: I0202 22:54:53.081500 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="184566af-1588-4a8c-9b65-3de6f1bda382" path="/var/lib/kubelet/pods/184566af-1588-4a8c-9b65-3de6f1bda382/volumes" Feb 02 22:54:53 crc kubenswrapper[4755]: I0202 22:54:53.082319 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58da41fb-8aca-4566-a2c2-a13c57ee04ce" path="/var/lib/kubelet/pods/58da41fb-8aca-4566-a2c2-a13c57ee04ce/volumes" Feb 02 22:54:53 crc kubenswrapper[4755]: I0202 22:54:53.082944 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7cdcbab-c1c0-4984-bf8d-781715d75dc1" path="/var/lib/kubelet/pods/f7cdcbab-c1c0-4984-bf8d-781715d75dc1/volumes" Feb 02 22:54:53 crc kubenswrapper[4755]: I0202 22:54:53.745266 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"1d3cae78-0c3c-404d-b13d-5595f3a10ddc","Type":"ContainerStarted","Data":"d3f30a550f0e5b52af18a2bd4ad31fb954e7af66400659e87b406a0171614f4d"} Feb 02 22:54:53 crc kubenswrapper[4755]: I0202 22:54:53.745583 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"1d3cae78-0c3c-404d-b13d-5595f3a10ddc","Type":"ContainerStarted","Data":"7b9f403f3ffe2917fe40995f0b922ed9943ad4baded6970729d8ede8450a64e3"} Feb 02 22:54:53 crc kubenswrapper[4755]: I0202 22:54:53.748703 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"e160c11e-8b86-4837-9a86-bd2eb97f94d9","Type":"ContainerStarted","Data":"585ae6258f290e8c66ebf43cd470a7bcba9956fc1016c58e85afb503547ba30b"} Feb 02 22:54:53 crc kubenswrapper[4755]: I0202 22:54:53.769130 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-proc-0" podStartSLOduration=2.769113297 podStartE2EDuration="2.769113297s" podCreationTimestamp="2026-02-02 22:54:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 
22:54:53.759183008 +0000 UTC m=+1249.450403344" watchObservedRunningTime="2026-02-02 22:54:53.769113297 +0000 UTC m=+1249.460333623" Feb 02 22:54:55 crc kubenswrapper[4755]: I0202 22:54:55.099659 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.741869 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-7857dcf8cf-dgghm"] Feb 02 22:54:56 crc kubenswrapper[4755]: E0202 22:54:56.742608 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7cdcbab-c1c0-4984-bf8d-781715d75dc1" containerName="init" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.742624 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7cdcbab-c1c0-4984-bf8d-781715d75dc1" containerName="init" Feb 02 22:54:56 crc kubenswrapper[4755]: E0202 22:54:56.742641 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7cdcbab-c1c0-4984-bf8d-781715d75dc1" containerName="dnsmasq-dns" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.742651 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7cdcbab-c1c0-4984-bf8d-781715d75dc1" containerName="dnsmasq-dns" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.742874 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7cdcbab-c1c0-4984-bf8d-781715d75dc1" containerName="dnsmasq-dns" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.743963 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.748114 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.748122 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.750576 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.755566 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-7857dcf8cf-dgghm"] Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.759282 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2fc39798-6917-40f3-badb-2633a9b0f37a-run-httpd\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.759382 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2fc39798-6917-40f3-badb-2633a9b0f37a-log-httpd\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.759436 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2fc39798-6917-40f3-badb-2633a9b0f37a-public-tls-certs\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.759462 4755 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2fc39798-6917-40f3-badb-2633a9b0f37a-internal-tls-certs\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.759497 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fc39798-6917-40f3-badb-2633a9b0f37a-config-data\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.759539 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fc39798-6917-40f3-badb-2633a9b0f37a-combined-ca-bundle\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.759593 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wg2pn\" (UniqueName: \"kubernetes.io/projected/2fc39798-6917-40f3-badb-2633a9b0f37a-kube-api-access-wg2pn\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.759621 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2fc39798-6917-40f3-badb-2633a9b0f37a-etc-swift\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.860815 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2fc39798-6917-40f3-badb-2633a9b0f37a-public-tls-certs\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.860856 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2fc39798-6917-40f3-badb-2633a9b0f37a-internal-tls-certs\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.860884 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fc39798-6917-40f3-badb-2633a9b0f37a-config-data\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.860916 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fc39798-6917-40f3-badb-2633a9b0f37a-combined-ca-bundle\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc 
kubenswrapper[4755]: I0202 22:54:56.860975 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wg2pn\" (UniqueName: \"kubernetes.io/projected/2fc39798-6917-40f3-badb-2633a9b0f37a-kube-api-access-wg2pn\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.860999 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2fc39798-6917-40f3-badb-2633a9b0f37a-etc-swift\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.861037 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2fc39798-6917-40f3-badb-2633a9b0f37a-run-httpd\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.861090 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2fc39798-6917-40f3-badb-2633a9b0f37a-log-httpd\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.861538 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2fc39798-6917-40f3-badb-2633a9b0f37a-log-httpd\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.862479 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2fc39798-6917-40f3-badb-2633a9b0f37a-run-httpd\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.869473 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2fc39798-6917-40f3-badb-2633a9b0f37a-etc-swift\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.871389 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fc39798-6917-40f3-badb-2633a9b0f37a-config-data\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.875662 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2fc39798-6917-40f3-badb-2633a9b0f37a-public-tls-certs\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.877141 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wg2pn\" 
(UniqueName: \"kubernetes.io/projected/2fc39798-6917-40f3-badb-2633a9b0f37a-kube-api-access-wg2pn\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.880350 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fc39798-6917-40f3-badb-2633a9b0f37a-combined-ca-bundle\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:56 crc kubenswrapper[4755]: I0202 22:54:56.881309 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2fc39798-6917-40f3-badb-2633a9b0f37a-internal-tls-certs\") pod \"swift-proxy-7857dcf8cf-dgghm\" (UID: \"2fc39798-6917-40f3-badb-2633a9b0f37a\") " pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:57 crc kubenswrapper[4755]: I0202 22:54:57.073460 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:57 crc kubenswrapper[4755]: I0202 22:54:57.623807 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:54:57 crc kubenswrapper[4755]: I0202 22:54:57.625272 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="719a114e-7ced-4950-9bce-749ce1431af8" containerName="sg-core" containerID="cri-o://575cfa105cdbe3bda469922da13e7a021eb5e7d085b3f06ecb7f43b073c73759" gracePeriod=30 Feb 02 22:54:57 crc kubenswrapper[4755]: I0202 22:54:57.625382 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="719a114e-7ced-4950-9bce-749ce1431af8" containerName="proxy-httpd" containerID="cri-o://a46108c25b9438f582147987d0e43bdf0f1dec1e7d3a95a12e2a1c45a98e7b31" gracePeriod=30 Feb 02 22:54:57 crc kubenswrapper[4755]: I0202 22:54:57.625434 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="719a114e-7ced-4950-9bce-749ce1431af8" containerName="ceilometer-notification-agent" containerID="cri-o://17317f6e9977ef6b00563c3e374efc7d5d24c99fff0798c311ab9d82240af718" gracePeriod=30 Feb 02 22:54:57 crc kubenswrapper[4755]: I0202 22:54:57.625747 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="719a114e-7ced-4950-9bce-749ce1431af8" containerName="ceilometer-central-agent" containerID="cri-o://40543d008b0bb18e4894c6386b05a4baf60b9b181756b20c142fe801819c54bc" gracePeriod=30 Feb 02 22:54:57 crc kubenswrapper[4755]: I0202 22:54:57.646925 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="719a114e-7ced-4950-9bce-749ce1431af8" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.191:3000/\": EOF" Feb 02 22:54:57 crc kubenswrapper[4755]: I0202 22:54:57.729484 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-7857dcf8cf-dgghm"] Feb 02 22:54:57 crc kubenswrapper[4755]: I0202 22:54:57.828789 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7857dcf8cf-dgghm" event={"ID":"2fc39798-6917-40f3-badb-2633a9b0f37a","Type":"ContainerStarted","Data":"a55e39de54c36bd8e73ebfde4e3637367ac86b1eb92888c9623ed9c5ca9e8317"} Feb 02 22:54:57 crc kubenswrapper[4755]: I0202 
22:54:57.838613 4755 generic.go:334] "Generic (PLEG): container finished" podID="719a114e-7ced-4950-9bce-749ce1431af8" containerID="575cfa105cdbe3bda469922da13e7a021eb5e7d085b3f06ecb7f43b073c73759" exitCode=2 Feb 02 22:54:57 crc kubenswrapper[4755]: I0202 22:54:57.838658 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"719a114e-7ced-4950-9bce-749ce1431af8","Type":"ContainerDied","Data":"575cfa105cdbe3bda469922da13e7a021eb5e7d085b3f06ecb7f43b073c73759"} Feb 02 22:54:58 crc kubenswrapper[4755]: I0202 22:54:58.515341 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-d7cb5dffc-4r8bd" Feb 02 22:54:58 crc kubenswrapper[4755]: I0202 22:54:58.583535 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-85c9cbd9b8-2tlmh"] Feb 02 22:54:58 crc kubenswrapper[4755]: I0202 22:54:58.583807 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-85c9cbd9b8-2tlmh" podUID="107d5dc0-af83-4833-bf2f-cd99d0e1f15d" containerName="neutron-api" containerID="cri-o://6694fef6169e0b72eb1f3de2061069f4acd39aad940d7d53961c3036cd18ddd5" gracePeriod=30 Feb 02 22:54:58 crc kubenswrapper[4755]: I0202 22:54:58.583899 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-85c9cbd9b8-2tlmh" podUID="107d5dc0-af83-4833-bf2f-cd99d0e1f15d" containerName="neutron-httpd" containerID="cri-o://cf39e69b8535440711741f79a967337f1e9e8694f99048739d48de23d6d54f68" gracePeriod=30 Feb 02 22:54:58 crc kubenswrapper[4755]: I0202 22:54:58.851165 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7857dcf8cf-dgghm" event={"ID":"2fc39798-6917-40f3-badb-2633a9b0f37a","Type":"ContainerStarted","Data":"e31a36ec3197b4e479b4673d5f2663d3e50810064118e2e77279cc1ed80afb2f"} Feb 02 22:54:58 crc kubenswrapper[4755]: I0202 22:54:58.851487 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:58 crc kubenswrapper[4755]: I0202 22:54:58.851499 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:54:58 crc kubenswrapper[4755]: I0202 22:54:58.851511 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7857dcf8cf-dgghm" event={"ID":"2fc39798-6917-40f3-badb-2633a9b0f37a","Type":"ContainerStarted","Data":"dba1156b47b9f641f43afb55f55732455051d3779287db7c31db836e735c322c"} Feb 02 22:54:58 crc kubenswrapper[4755]: I0202 22:54:58.856240 4755 generic.go:334] "Generic (PLEG): container finished" podID="107d5dc0-af83-4833-bf2f-cd99d0e1f15d" containerID="cf39e69b8535440711741f79a967337f1e9e8694f99048739d48de23d6d54f68" exitCode=0 Feb 02 22:54:58 crc kubenswrapper[4755]: I0202 22:54:58.856295 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-85c9cbd9b8-2tlmh" event={"ID":"107d5dc0-af83-4833-bf2f-cd99d0e1f15d","Type":"ContainerDied","Data":"cf39e69b8535440711741f79a967337f1e9e8694f99048739d48de23d6d54f68"} Feb 02 22:54:58 crc kubenswrapper[4755]: I0202 22:54:58.858776 4755 generic.go:334] "Generic (PLEG): container finished" podID="719a114e-7ced-4950-9bce-749ce1431af8" containerID="a46108c25b9438f582147987d0e43bdf0f1dec1e7d3a95a12e2a1c45a98e7b31" exitCode=0 Feb 02 22:54:58 crc kubenswrapper[4755]: I0202 22:54:58.858794 4755 generic.go:334] "Generic (PLEG): container finished" podID="719a114e-7ced-4950-9bce-749ce1431af8" 
containerID="40543d008b0bb18e4894c6386b05a4baf60b9b181756b20c142fe801819c54bc" exitCode=0 Feb 02 22:54:58 crc kubenswrapper[4755]: I0202 22:54:58.858810 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"719a114e-7ced-4950-9bce-749ce1431af8","Type":"ContainerDied","Data":"a46108c25b9438f582147987d0e43bdf0f1dec1e7d3a95a12e2a1c45a98e7b31"} Feb 02 22:54:58 crc kubenswrapper[4755]: I0202 22:54:58.858830 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"719a114e-7ced-4950-9bce-749ce1431af8","Type":"ContainerDied","Data":"40543d008b0bb18e4894c6386b05a4baf60b9b181756b20c142fe801819c54bc"} Feb 02 22:54:58 crc kubenswrapper[4755]: I0202 22:54:58.870078 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-7857dcf8cf-dgghm" podStartSLOduration=2.870063454 podStartE2EDuration="2.870063454s" podCreationTimestamp="2026-02-02 22:54:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:54:58.868017037 +0000 UTC m=+1254.559237363" watchObservedRunningTime="2026-02-02 22:54:58.870063454 +0000 UTC m=+1254.561283780" Feb 02 22:55:00 crc kubenswrapper[4755]: I0202 22:55:00.419555 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Feb 02 22:55:01 crc kubenswrapper[4755]: I0202 22:55:01.890186 4755 generic.go:334] "Generic (PLEG): container finished" podID="719a114e-7ced-4950-9bce-749ce1431af8" containerID="17317f6e9977ef6b00563c3e374efc7d5d24c99fff0798c311ab9d82240af718" exitCode=0 Feb 02 22:55:01 crc kubenswrapper[4755]: I0202 22:55:01.890418 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"719a114e-7ced-4950-9bce-749ce1431af8","Type":"ContainerDied","Data":"17317f6e9977ef6b00563c3e374efc7d5d24c99fff0798c311ab9d82240af718"} Feb 02 22:55:02 crc kubenswrapper[4755]: I0202 22:55:02.082331 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:55:02 crc kubenswrapper[4755]: I0202 22:55:02.903452 4755 generic.go:334] "Generic (PLEG): container finished" podID="107d5dc0-af83-4833-bf2f-cd99d0e1f15d" containerID="6694fef6169e0b72eb1f3de2061069f4acd39aad940d7d53961c3036cd18ddd5" exitCode=0 Feb 02 22:55:02 crc kubenswrapper[4755]: I0202 22:55:02.903488 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-85c9cbd9b8-2tlmh" event={"ID":"107d5dc0-af83-4833-bf2f-cd99d0e1f15d","Type":"ContainerDied","Data":"6694fef6169e0b72eb1f3de2061069f4acd39aad940d7d53961c3036cd18ddd5"} Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.416562 4755 util.go:48] "No ready sandbox for pod can be found. 
Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.472886 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-config-data\") pod \"719a114e-7ced-4950-9bce-749ce1431af8\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") "
Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.473173 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/719a114e-7ced-4950-9bce-749ce1431af8-run-httpd\") pod \"719a114e-7ced-4950-9bce-749ce1431af8\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") "
Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.473323 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/719a114e-7ced-4950-9bce-749ce1431af8-log-httpd\") pod \"719a114e-7ced-4950-9bce-749ce1431af8\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") "
Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.473511 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-combined-ca-bundle\") pod \"719a114e-7ced-4950-9bce-749ce1431af8\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") "
Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.473681 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-sg-core-conf-yaml\") pod \"719a114e-7ced-4950-9bce-749ce1431af8\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") "
Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.473719 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/719a114e-7ced-4950-9bce-749ce1431af8-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "719a114e-7ced-4950-9bce-749ce1431af8" (UID: "719a114e-7ced-4950-9bce-749ce1431af8"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.473804 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tl542\" (UniqueName: \"kubernetes.io/projected/719a114e-7ced-4950-9bce-749ce1431af8-kube-api-access-tl542\") pod \"719a114e-7ced-4950-9bce-749ce1431af8\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") "
Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.473856 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-scripts\") pod \"719a114e-7ced-4950-9bce-749ce1431af8\" (UID: \"719a114e-7ced-4950-9bce-749ce1431af8\") "
Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.473921 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/719a114e-7ced-4950-9bce-749ce1431af8-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "719a114e-7ced-4950-9bce-749ce1431af8" (UID: "719a114e-7ced-4950-9bce-749ce1431af8"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.474716 4755 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/719a114e-7ced-4950-9bce-749ce1431af8-run-httpd\") on node \"crc\" DevicePath \"\""
Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.474828 4755 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/719a114e-7ced-4950-9bce-749ce1431af8-log-httpd\") on node \"crc\" DevicePath \"\""
Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.482471 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/719a114e-7ced-4950-9bce-749ce1431af8-kube-api-access-tl542" (OuterVolumeSpecName: "kube-api-access-tl542") pod "719a114e-7ced-4950-9bce-749ce1431af8" (UID: "719a114e-7ced-4950-9bce-749ce1431af8"). InnerVolumeSpecName "kube-api-access-tl542". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.483594 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-scripts" (OuterVolumeSpecName: "scripts") pod "719a114e-7ced-4950-9bce-749ce1431af8" (UID: "719a114e-7ced-4950-9bce-749ce1431af8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.487658 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-85c9cbd9b8-2tlmh"
Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.510612 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "719a114e-7ced-4950-9bce-749ce1431af8" (UID: "719a114e-7ced-4950-9bce-749ce1431af8"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.576476 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pnsn9\" (UniqueName: \"kubernetes.io/projected/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-kube-api-access-pnsn9\") pod \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\" (UID: \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\") " Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.576972 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-config\") pod \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\" (UID: \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\") " Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.577027 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-httpd-config\") pod \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\" (UID: \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\") " Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.577057 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-combined-ca-bundle\") pod \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\" (UID: \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\") " Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.577102 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-ovndb-tls-certs\") pod \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\" (UID: \"107d5dc0-af83-4833-bf2f-cd99d0e1f15d\") " Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.577521 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tl542\" (UniqueName: \"kubernetes.io/projected/719a114e-7ced-4950-9bce-749ce1431af8-kube-api-access-tl542\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.577545 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.577558 4755 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.585297 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-kube-api-access-pnsn9" (OuterVolumeSpecName: "kube-api-access-pnsn9") pod "107d5dc0-af83-4833-bf2f-cd99d0e1f15d" (UID: "107d5dc0-af83-4833-bf2f-cd99d0e1f15d"). InnerVolumeSpecName "kube-api-access-pnsn9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.589968 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "107d5dc0-af83-4833-bf2f-cd99d0e1f15d" (UID: "107d5dc0-af83-4833-bf2f-cd99d0e1f15d"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.613257 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "719a114e-7ced-4950-9bce-749ce1431af8" (UID: "719a114e-7ced-4950-9bce-749ce1431af8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.641937 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-config-data" (OuterVolumeSpecName: "config-data") pod "719a114e-7ced-4950-9bce-749ce1431af8" (UID: "719a114e-7ced-4950-9bce-749ce1431af8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.641975 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-config" (OuterVolumeSpecName: "config") pod "107d5dc0-af83-4833-bf2f-cd99d0e1f15d" (UID: "107d5dc0-af83-4833-bf2f-cd99d0e1f15d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.658800 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "107d5dc0-af83-4833-bf2f-cd99d0e1f15d" (UID: "107d5dc0-af83-4833-bf2f-cd99d0e1f15d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.679897 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pnsn9\" (UniqueName: \"kubernetes.io/projected/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-kube-api-access-pnsn9\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.679925 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.679935 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.679946 4755 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-httpd-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.679991 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.680003 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/719a114e-7ced-4950-9bce-749ce1431af8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.698800 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "107d5dc0-af83-4833-bf2f-cd99d0e1f15d" (UID: "107d5dc0-af83-4833-bf2f-cd99d0e1f15d"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.782246 4755 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/107d5dc0-af83-4833-bf2f-cd99d0e1f15d-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.947072 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"719a114e-7ced-4950-9bce-749ce1431af8","Type":"ContainerDied","Data":"3e9a3625104568895fd1543787cdec5f042a74c06f1be3d2341b4ec5690ad374"} Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.947493 4755 scope.go:117] "RemoveContainer" containerID="a46108c25b9438f582147987d0e43bdf0f1dec1e7d3a95a12e2a1c45a98e7b31" Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.947082 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.948923 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-85c9cbd9b8-2tlmh" Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.948940 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-85c9cbd9b8-2tlmh" event={"ID":"107d5dc0-af83-4833-bf2f-cd99d0e1f15d","Type":"ContainerDied","Data":"69e8651d5f6e819cbabc2b4746acc52c19b5c08412179f776a7a30f3355d24f8"} Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.961105 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"e160c11e-8b86-4837-9a86-bd2eb97f94d9","Type":"ContainerStarted","Data":"6c954c9f1ffab45b065a08f242601d93de6f068f3dabc46eb2a5d9f4e5fbc8e2"} Feb 02 22:55:05 crc kubenswrapper[4755]: I0202 22:55:05.993645 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.718257568 podStartE2EDuration="14.993618738s" podCreationTimestamp="2026-02-02 22:54:51 +0000 UTC" firstStartedPulling="2026-02-02 22:54:52.705355966 +0000 UTC m=+1248.396576292" lastFinishedPulling="2026-02-02 22:55:04.980717136 +0000 UTC m=+1260.671937462" observedRunningTime="2026-02-02 22:55:05.983617697 +0000 UTC m=+1261.674838033" watchObservedRunningTime="2026-02-02 22:55:05.993618738 +0000 UTC m=+1261.684839074" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.012190 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.013691 4755 scope.go:117] "RemoveContainer" containerID="575cfa105cdbe3bda469922da13e7a021eb5e7d085b3f06ecb7f43b073c73759" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.042949 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.056928 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-85c9cbd9b8-2tlmh"] Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.063036 4755 scope.go:117] "RemoveContainer" containerID="17317f6e9977ef6b00563c3e374efc7d5d24c99fff0798c311ab9d82240af718" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.074227 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/neutron-85c9cbd9b8-2tlmh"] Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.106245 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:55:06 crc kubenswrapper[4755]: E0202 22:55:06.110355 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="719a114e-7ced-4950-9bce-749ce1431af8" containerName="sg-core" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.110383 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="719a114e-7ced-4950-9bce-749ce1431af8" containerName="sg-core" Feb 02 22:55:06 crc kubenswrapper[4755]: E0202 22:55:06.110395 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="719a114e-7ced-4950-9bce-749ce1431af8" containerName="ceilometer-central-agent" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.110401 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="719a114e-7ced-4950-9bce-749ce1431af8" containerName="ceilometer-central-agent" Feb 02 22:55:06 crc kubenswrapper[4755]: E0202 22:55:06.110428 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="107d5dc0-af83-4833-bf2f-cd99d0e1f15d" containerName="neutron-httpd" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.110434 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="107d5dc0-af83-4833-bf2f-cd99d0e1f15d" containerName="neutron-httpd" Feb 02 22:55:06 crc kubenswrapper[4755]: E0202 22:55:06.110442 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="719a114e-7ced-4950-9bce-749ce1431af8" containerName="ceilometer-notification-agent" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.110448 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="719a114e-7ced-4950-9bce-749ce1431af8" containerName="ceilometer-notification-agent" Feb 02 22:55:06 crc kubenswrapper[4755]: E0202 22:55:06.110456 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="719a114e-7ced-4950-9bce-749ce1431af8" containerName="proxy-httpd" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.110461 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="719a114e-7ced-4950-9bce-749ce1431af8" containerName="proxy-httpd" Feb 02 22:55:06 crc kubenswrapper[4755]: E0202 22:55:06.110477 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="107d5dc0-af83-4833-bf2f-cd99d0e1f15d" containerName="neutron-api" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.110483 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="107d5dc0-af83-4833-bf2f-cd99d0e1f15d" containerName="neutron-api" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.110747 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="719a114e-7ced-4950-9bce-749ce1431af8" containerName="ceilometer-central-agent" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.110765 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="719a114e-7ced-4950-9bce-749ce1431af8" containerName="sg-core" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.110775 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="107d5dc0-af83-4833-bf2f-cd99d0e1f15d" containerName="neutron-api" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.110785 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="719a114e-7ced-4950-9bce-749ce1431af8" containerName="ceilometer-notification-agent" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.110804 4755 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="719a114e-7ced-4950-9bce-749ce1431af8" containerName="proxy-httpd" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.110827 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="107d5dc0-af83-4833-bf2f-cd99d0e1f15d" containerName="neutron-httpd" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.112675 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.115145 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.116565 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.118946 4755 scope.go:117] "RemoveContainer" containerID="40543d008b0bb18e4894c6386b05a4baf60b9b181756b20c142fe801819c54bc" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.127603 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.145388 4755 scope.go:117] "RemoveContainer" containerID="cf39e69b8535440711741f79a967337f1e9e8694f99048739d48de23d6d54f68" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.164159 4755 scope.go:117] "RemoveContainer" containerID="6694fef6169e0b72eb1f3de2061069f4acd39aad940d7d53961c3036cd18ddd5" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.191470 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " pod="openstack/ceilometer-0" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.191550 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " pod="openstack/ceilometer-0" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.191588 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-scripts\") pod \"ceilometer-0\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " pod="openstack/ceilometer-0" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.191628 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1caed79b-dddf-4068-9f4a-0d014562bfce-log-httpd\") pod \"ceilometer-0\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " pod="openstack/ceilometer-0" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.191668 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1caed79b-dddf-4068-9f4a-0d014562bfce-run-httpd\") pod \"ceilometer-0\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " pod="openstack/ceilometer-0" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.191768 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5w9b\" (UniqueName: 
\"kubernetes.io/projected/1caed79b-dddf-4068-9f4a-0d014562bfce-kube-api-access-k5w9b\") pod \"ceilometer-0\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " pod="openstack/ceilometer-0" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.191855 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-config-data\") pod \"ceilometer-0\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " pod="openstack/ceilometer-0" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.293597 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1caed79b-dddf-4068-9f4a-0d014562bfce-run-httpd\") pod \"ceilometer-0\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " pod="openstack/ceilometer-0" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.293698 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5w9b\" (UniqueName: \"kubernetes.io/projected/1caed79b-dddf-4068-9f4a-0d014562bfce-kube-api-access-k5w9b\") pod \"ceilometer-0\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " pod="openstack/ceilometer-0" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.293818 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-config-data\") pod \"ceilometer-0\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " pod="openstack/ceilometer-0" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.293855 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " pod="openstack/ceilometer-0" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.293908 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " pod="openstack/ceilometer-0" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.293964 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-scripts\") pod \"ceilometer-0\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " pod="openstack/ceilometer-0" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.294000 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1caed79b-dddf-4068-9f4a-0d014562bfce-log-httpd\") pod \"ceilometer-0\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " pod="openstack/ceilometer-0" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.294159 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1caed79b-dddf-4068-9f4a-0d014562bfce-run-httpd\") pod \"ceilometer-0\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " pod="openstack/ceilometer-0" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.294382 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/1caed79b-dddf-4068-9f4a-0d014562bfce-log-httpd\") pod \"ceilometer-0\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " pod="openstack/ceilometer-0" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.299173 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " pod="openstack/ceilometer-0" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.299749 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-config-data\") pod \"ceilometer-0\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " pod="openstack/ceilometer-0" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.301473 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " pod="openstack/ceilometer-0" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.311416 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5w9b\" (UniqueName: \"kubernetes.io/projected/1caed79b-dddf-4068-9f4a-0d014562bfce-kube-api-access-k5w9b\") pod \"ceilometer-0\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " pod="openstack/ceilometer-0" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.311899 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-scripts\") pod \"ceilometer-0\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " pod="openstack/ceilometer-0" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.430309 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:55:06 crc kubenswrapper[4755]: I0202 22:55:06.995244 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:55:07 crc kubenswrapper[4755]: I0202 22:55:07.079688 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="107d5dc0-af83-4833-bf2f-cd99d0e1f15d" path="/var/lib/kubelet/pods/107d5dc0-af83-4833-bf2f-cd99d0e1f15d/volumes" Feb 02 22:55:07 crc kubenswrapper[4755]: I0202 22:55:07.080648 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="719a114e-7ced-4950-9bce-749ce1431af8" path="/var/lib/kubelet/pods/719a114e-7ced-4950-9bce-749ce1431af8/volumes" Feb 02 22:55:07 crc kubenswrapper[4755]: I0202 22:55:07.081482 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-7857dcf8cf-dgghm" Feb 02 22:55:07 crc kubenswrapper[4755]: I0202 22:55:07.983443 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1caed79b-dddf-4068-9f4a-0d014562bfce","Type":"ContainerStarted","Data":"c4e17f2e14c2186a42ab1df0b4ccc290c3352b93039e1ff0b3b80deaf033cd5a"} Feb 02 22:55:07 crc kubenswrapper[4755]: I0202 22:55:07.983948 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1caed79b-dddf-4068-9f4a-0d014562bfce","Type":"ContainerStarted","Data":"5dc6a8eddf38752ce502b64cbc49a6ea40867f59ecaac2389229dc25843089e1"} Feb 02 22:55:08 crc kubenswrapper[4755]: I0202 22:55:08.993092 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1caed79b-dddf-4068-9f4a-0d014562bfce","Type":"ContainerStarted","Data":"ede5a715cedb0c5feaf407b82c82c0caf666ea38062ad8ff86d9eeb806f19380"} Feb 02 22:55:10 crc kubenswrapper[4755]: I0202 22:55:10.003161 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1caed79b-dddf-4068-9f4a-0d014562bfce","Type":"ContainerStarted","Data":"01fd900786e7ab6ac8122643d61f6bafd948485ea789eada040bc3f3c1ac20e8"} Feb 02 22:55:12 crc kubenswrapper[4755]: I0202 22:55:12.025614 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1caed79b-dddf-4068-9f4a-0d014562bfce","Type":"ContainerStarted","Data":"a4652644e5fae04947743f9679b0b9c225c3560380a54bfdd09ac7accfcc8400"} Feb 02 22:55:12 crc kubenswrapper[4755]: I0202 22:55:12.026130 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 22:55:12 crc kubenswrapper[4755]: I0202 22:55:12.056117 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.0043008 podStartE2EDuration="6.056098708s" podCreationTimestamp="2026-02-02 22:55:06 +0000 UTC" firstStartedPulling="2026-02-02 22:55:07.002464427 +0000 UTC m=+1262.693684763" lastFinishedPulling="2026-02-02 22:55:11.054262345 +0000 UTC m=+1266.745482671" observedRunningTime="2026-02-02 22:55:12.05295838 +0000 UTC m=+1267.744178716" watchObservedRunningTime="2026-02-02 22:55:12.056098708 +0000 UTC m=+1267.747319044" Feb 02 22:55:12 crc kubenswrapper[4755]: I0202 22:55:12.969085 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-wdj6p"] Feb 02 22:55:12 crc kubenswrapper[4755]: I0202 22:55:12.970992 4755 util.go:30] "No sandbox for pod can be found. 
Feb 02 22:55:12 crc kubenswrapper[4755]: I0202 22:55:12.980188 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-wdj6p"]
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.066164 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-jwwfl"]
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.067580 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-jwwfl"
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.079787 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-6bd1-account-create-update-7s7bt"]
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.081515 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-6bd1-account-create-update-7s7bt"
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.087046 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret"
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.094182 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-6bd1-account-create-update-7s7bt"]
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.104205 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-jwwfl"]
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.147655 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srxp6\" (UniqueName: \"kubernetes.io/projected/1f3d167f-44ce-4d19-a53c-b9d370837a3d-kube-api-access-srxp6\") pod \"nova-api-db-create-wdj6p\" (UID: \"1f3d167f-44ce-4d19-a53c-b9d370837a3d\") " pod="openstack/nova-api-db-create-wdj6p"
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.147874 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f3d167f-44ce-4d19-a53c-b9d370837a3d-operator-scripts\") pod \"nova-api-db-create-wdj6p\" (UID: \"1f3d167f-44ce-4d19-a53c-b9d370837a3d\") " pod="openstack/nova-api-db-create-wdj6p"
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.249467 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gcnd\" (UniqueName: \"kubernetes.io/projected/29a63582-71ab-4de9-a5ab-ca97b11b0a73-kube-api-access-6gcnd\") pod \"nova-cell0-db-create-jwwfl\" (UID: \"29a63582-71ab-4de9-a5ab-ca97b11b0a73\") " pod="openstack/nova-cell0-db-create-jwwfl"
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.249673 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29a63582-71ab-4de9-a5ab-ca97b11b0a73-operator-scripts\") pod \"nova-cell0-db-create-jwwfl\" (UID: \"29a63582-71ab-4de9-a5ab-ca97b11b0a73\") " pod="openstack/nova-cell0-db-create-jwwfl"
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.249785 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srxp6\" (UniqueName: \"kubernetes.io/projected/1f3d167f-44ce-4d19-a53c-b9d370837a3d-kube-api-access-srxp6\") pod \"nova-api-db-create-wdj6p\" (UID: \"1f3d167f-44ce-4d19-a53c-b9d370837a3d\") " pod="openstack/nova-api-db-create-wdj6p"
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.250203 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f3d167f-44ce-4d19-a53c-b9d370837a3d-operator-scripts\") pod \"nova-api-db-create-wdj6p\" (UID: \"1f3d167f-44ce-4d19-a53c-b9d370837a3d\") " pod="openstack/nova-api-db-create-wdj6p"
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.251014 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f3d167f-44ce-4d19-a53c-b9d370837a3d-operator-scripts\") pod \"nova-api-db-create-wdj6p\" (UID: \"1f3d167f-44ce-4d19-a53c-b9d370837a3d\") " pod="openstack/nova-api-db-create-wdj6p"
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.251229 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3fcb6639-8e5c-41c3-8b9c-ce398dffaba8-operator-scripts\") pod \"nova-api-6bd1-account-create-update-7s7bt\" (UID: \"3fcb6639-8e5c-41c3-8b9c-ce398dffaba8\") " pod="openstack/nova-api-6bd1-account-create-update-7s7bt"
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.251849 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmw7l\" (UniqueName: \"kubernetes.io/projected/3fcb6639-8e5c-41c3-8b9c-ce398dffaba8-kube-api-access-nmw7l\") pod \"nova-api-6bd1-account-create-update-7s7bt\" (UID: \"3fcb6639-8e5c-41c3-8b9c-ce398dffaba8\") " pod="openstack/nova-api-6bd1-account-create-update-7s7bt"
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.278137 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-zzz84"]
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.279293 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-zzz84"
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.294877 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-zzz84"]
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.303505 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-3aa0-account-create-update-5fhf8"]
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.309290 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srxp6\" (UniqueName: \"kubernetes.io/projected/1f3d167f-44ce-4d19-a53c-b9d370837a3d-kube-api-access-srxp6\") pod \"nova-api-db-create-wdj6p\" (UID: \"1f3d167f-44ce-4d19-a53c-b9d370837a3d\") " pod="openstack/nova-api-db-create-wdj6p"
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.318837 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-3aa0-account-create-update-5fhf8"
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.325552 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret"
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.345211 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-3aa0-account-create-update-5fhf8"]
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.362243 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3fcb6639-8e5c-41c3-8b9c-ce398dffaba8-operator-scripts\") pod \"nova-api-6bd1-account-create-update-7s7bt\" (UID: \"3fcb6639-8e5c-41c3-8b9c-ce398dffaba8\") " pod="openstack/nova-api-6bd1-account-create-update-7s7bt"
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.362294 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmw7l\" (UniqueName: \"kubernetes.io/projected/3fcb6639-8e5c-41c3-8b9c-ce398dffaba8-kube-api-access-nmw7l\") pod \"nova-api-6bd1-account-create-update-7s7bt\" (UID: \"3fcb6639-8e5c-41c3-8b9c-ce398dffaba8\") " pod="openstack/nova-api-6bd1-account-create-update-7s7bt"
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.362340 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gcnd\" (UniqueName: \"kubernetes.io/projected/29a63582-71ab-4de9-a5ab-ca97b11b0a73-kube-api-access-6gcnd\") pod \"nova-cell0-db-create-jwwfl\" (UID: \"29a63582-71ab-4de9-a5ab-ca97b11b0a73\") " pod="openstack/nova-cell0-db-create-jwwfl"
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.362379 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vs4hp\" (UniqueName: \"kubernetes.io/projected/5b957bd8-a821-475c-9316-46a0e81487a8-kube-api-access-vs4hp\") pod \"nova-cell0-3aa0-account-create-update-5fhf8\" (UID: \"5b957bd8-a821-475c-9316-46a0e81487a8\") " pod="openstack/nova-cell0-3aa0-account-create-update-5fhf8"
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.362435 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b957bd8-a821-475c-9316-46a0e81487a8-operator-scripts\") pod \"nova-cell0-3aa0-account-create-update-5fhf8\" (UID: \"5b957bd8-a821-475c-9316-46a0e81487a8\") " pod="openstack/nova-cell0-3aa0-account-create-update-5fhf8"
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.362469 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29a63582-71ab-4de9-a5ab-ca97b11b0a73-operator-scripts\") pod \"nova-cell0-db-create-jwwfl\" (UID: \"29a63582-71ab-4de9-a5ab-ca97b11b0a73\") " pod="openstack/nova-cell0-db-create-jwwfl"
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.363197 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29a63582-71ab-4de9-a5ab-ca97b11b0a73-operator-scripts\") pod \"nova-cell0-db-create-jwwfl\" (UID: \"29a63582-71ab-4de9-a5ab-ca97b11b0a73\") " pod="openstack/nova-cell0-db-create-jwwfl"
Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.363815 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3fcb6639-8e5c-41c3-8b9c-ce398dffaba8-operator-scripts\") pod \"nova-api-6bd1-account-create-update-7s7bt\" (UID: \"3fcb6639-8e5c-41c3-8b9c-ce398dffaba8\") " pod="openstack/nova-api-6bd1-account-create-update-7s7bt"
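Interleaved with the nova job-pod creation above, the reflector entries record the kubelet populating its watch caches for the secrets each pod references (nova-api-db-secret, nova-cell0-db-secret, and shortly below nova-cell1-db-secret) before the corresponding secret volumes can be mounted. A small sketch, under the same format assumptions as the earlier ones, that lists which namespaced secrets became available when:

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret"
var cacheRE = regexp.MustCompile(`Caches populated for \*v1\.Secret from object-"([^"]+)"/"([^"]+)"`)

func main() {
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 1024*1024), 1024*1024)
	for sc.Scan() {
		if m := cacheRE.FindStringSubmatch(sc.Text()); m != nil {
			fmt.Printf("secret cached: %s/%s\n", m[1], m[2])
		}
	}
}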
\"kubernetes.io/configmap/3fcb6639-8e5c-41c3-8b9c-ce398dffaba8-operator-scripts\") pod \"nova-api-6bd1-account-create-update-7s7bt\" (UID: \"3fcb6639-8e5c-41c3-8b9c-ce398dffaba8\") " pod="openstack/nova-api-6bd1-account-create-update-7s7bt" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.408613 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmw7l\" (UniqueName: \"kubernetes.io/projected/3fcb6639-8e5c-41c3-8b9c-ce398dffaba8-kube-api-access-nmw7l\") pod \"nova-api-6bd1-account-create-update-7s7bt\" (UID: \"3fcb6639-8e5c-41c3-8b9c-ce398dffaba8\") " pod="openstack/nova-api-6bd1-account-create-update-7s7bt" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.423637 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gcnd\" (UniqueName: \"kubernetes.io/projected/29a63582-71ab-4de9-a5ab-ca97b11b0a73-kube-api-access-6gcnd\") pod \"nova-cell0-db-create-jwwfl\" (UID: \"29a63582-71ab-4de9-a5ab-ca97b11b0a73\") " pod="openstack/nova-cell0-db-create-jwwfl" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.465399 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b957bd8-a821-475c-9316-46a0e81487a8-operator-scripts\") pod \"nova-cell0-3aa0-account-create-update-5fhf8\" (UID: \"5b957bd8-a821-475c-9316-46a0e81487a8\") " pod="openstack/nova-cell0-3aa0-account-create-update-5fhf8" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.465507 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8-operator-scripts\") pod \"nova-cell1-db-create-zzz84\" (UID: \"9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8\") " pod="openstack/nova-cell1-db-create-zzz84" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.465554 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gmkt\" (UniqueName: \"kubernetes.io/projected/9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8-kube-api-access-4gmkt\") pod \"nova-cell1-db-create-zzz84\" (UID: \"9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8\") " pod="openstack/nova-cell1-db-create-zzz84" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.465603 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vs4hp\" (UniqueName: \"kubernetes.io/projected/5b957bd8-a821-475c-9316-46a0e81487a8-kube-api-access-vs4hp\") pod \"nova-cell0-3aa0-account-create-update-5fhf8\" (UID: \"5b957bd8-a821-475c-9316-46a0e81487a8\") " pod="openstack/nova-cell0-3aa0-account-create-update-5fhf8" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.466561 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b957bd8-a821-475c-9316-46a0e81487a8-operator-scripts\") pod \"nova-cell0-3aa0-account-create-update-5fhf8\" (UID: \"5b957bd8-a821-475c-9316-46a0e81487a8\") " pod="openstack/nova-cell0-3aa0-account-create-update-5fhf8" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.505242 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vs4hp\" (UniqueName: \"kubernetes.io/projected/5b957bd8-a821-475c-9316-46a0e81487a8-kube-api-access-vs4hp\") pod \"nova-cell0-3aa0-account-create-update-5fhf8\" (UID: \"5b957bd8-a821-475c-9316-46a0e81487a8\") " 
pod="openstack/nova-cell0-3aa0-account-create-update-5fhf8" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.520894 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-f9e8-account-create-update-p7swg"] Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.522050 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-f9e8-account-create-update-p7swg" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.523967 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.544307 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-f9e8-account-create-update-p7swg"] Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.581474 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2pjf8\" (UniqueName: \"kubernetes.io/projected/c0cbc632-66aa-4301-952c-a59fcbd3e884-kube-api-access-2pjf8\") pod \"nova-cell1-f9e8-account-create-update-p7swg\" (UID: \"c0cbc632-66aa-4301-952c-a59fcbd3e884\") " pod="openstack/nova-cell1-f9e8-account-create-update-p7swg" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.583407 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8-operator-scripts\") pod \"nova-cell1-db-create-zzz84\" (UID: \"9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8\") " pod="openstack/nova-cell1-db-create-zzz84" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.583494 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c0cbc632-66aa-4301-952c-a59fcbd3e884-operator-scripts\") pod \"nova-cell1-f9e8-account-create-update-p7swg\" (UID: \"c0cbc632-66aa-4301-952c-a59fcbd3e884\") " pod="openstack/nova-cell1-f9e8-account-create-update-p7swg" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.583575 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gmkt\" (UniqueName: \"kubernetes.io/projected/9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8-kube-api-access-4gmkt\") pod \"nova-cell1-db-create-zzz84\" (UID: \"9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8\") " pod="openstack/nova-cell1-db-create-zzz84" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.585287 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8-operator-scripts\") pod \"nova-cell1-db-create-zzz84\" (UID: \"9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8\") " pod="openstack/nova-cell1-db-create-zzz84" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.598762 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-wdj6p" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.604299 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gmkt\" (UniqueName: \"kubernetes.io/projected/9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8-kube-api-access-4gmkt\") pod \"nova-cell1-db-create-zzz84\" (UID: \"9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8\") " pod="openstack/nova-cell1-db-create-zzz84" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.661121 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-3aa0-account-create-update-5fhf8" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.688133 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2pjf8\" (UniqueName: \"kubernetes.io/projected/c0cbc632-66aa-4301-952c-a59fcbd3e884-kube-api-access-2pjf8\") pod \"nova-cell1-f9e8-account-create-update-p7swg\" (UID: \"c0cbc632-66aa-4301-952c-a59fcbd3e884\") " pod="openstack/nova-cell1-f9e8-account-create-update-p7swg" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.688555 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c0cbc632-66aa-4301-952c-a59fcbd3e884-operator-scripts\") pod \"nova-cell1-f9e8-account-create-update-p7swg\" (UID: \"c0cbc632-66aa-4301-952c-a59fcbd3e884\") " pod="openstack/nova-cell1-f9e8-account-create-update-p7swg" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.688756 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-jwwfl" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.690061 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c0cbc632-66aa-4301-952c-a59fcbd3e884-operator-scripts\") pod \"nova-cell1-f9e8-account-create-update-p7swg\" (UID: \"c0cbc632-66aa-4301-952c-a59fcbd3e884\") " pod="openstack/nova-cell1-f9e8-account-create-update-p7swg" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.701307 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-6bd1-account-create-update-7s7bt" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.713814 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2pjf8\" (UniqueName: \"kubernetes.io/projected/c0cbc632-66aa-4301-952c-a59fcbd3e884-kube-api-access-2pjf8\") pod \"nova-cell1-f9e8-account-create-update-p7swg\" (UID: \"c0cbc632-66aa-4301-952c-a59fcbd3e884\") " pod="openstack/nova-cell1-f9e8-account-create-update-p7swg" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.884947 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-f9e8-account-create-update-p7swg" Feb 02 22:55:13 crc kubenswrapper[4755]: I0202 22:55:13.899148 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-zzz84" Feb 02 22:55:14 crc kubenswrapper[4755]: I0202 22:55:14.484962 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-3aa0-account-create-update-5fhf8"] Feb 02 22:55:14 crc kubenswrapper[4755]: I0202 22:55:14.538052 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-jwwfl"] Feb 02 22:55:14 crc kubenswrapper[4755]: I0202 22:55:14.548797 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-6bd1-account-create-update-7s7bt"] Feb 02 22:55:14 crc kubenswrapper[4755]: I0202 22:55:14.553798 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-wdj6p"] Feb 02 22:55:14 crc kubenswrapper[4755]: I0202 22:55:14.631553 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-f9e8-account-create-update-p7swg"] Feb 02 22:55:14 crc kubenswrapper[4755]: I0202 22:55:14.695173 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-zzz84"] Feb 02 22:55:14 crc kubenswrapper[4755]: W0202 22:55:14.702785 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0cbc632_66aa_4301_952c_a59fcbd3e884.slice/crio-71a510e50cf19896e9127e53cdc5858d288f874885de37ab1a56a054463c21f4 WatchSource:0}: Error finding container 71a510e50cf19896e9127e53cdc5858d288f874885de37ab1a56a054463c21f4: Status 404 returned error can't find the container with id 71a510e50cf19896e9127e53cdc5858d288f874885de37ab1a56a054463c21f4 Feb 02 22:55:15 crc kubenswrapper[4755]: I0202 22:55:15.059922 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-zzz84" event={"ID":"9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8","Type":"ContainerStarted","Data":"a478a38b0d155268bdef66870aa039cd21d67f1464e4b71b70065ff6e0edbea3"} Feb 02 22:55:15 crc kubenswrapper[4755]: I0202 22:55:15.059965 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-zzz84" event={"ID":"9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8","Type":"ContainerStarted","Data":"484e652a37f64c376dbe6656acd2a39590b4a1ad5f6060b084c0f807d8411590"} Feb 02 22:55:15 crc kubenswrapper[4755]: I0202 22:55:15.061953 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-f9e8-account-create-update-p7swg" event={"ID":"c0cbc632-66aa-4301-952c-a59fcbd3e884","Type":"ContainerStarted","Data":"96b3b4f82e7155a2511283cdad4ad6b98845ccd5cd92ea505830eadbb353ec72"} Feb 02 22:55:15 crc kubenswrapper[4755]: I0202 22:55:15.061992 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-f9e8-account-create-update-p7swg" event={"ID":"c0cbc632-66aa-4301-952c-a59fcbd3e884","Type":"ContainerStarted","Data":"71a510e50cf19896e9127e53cdc5858d288f874885de37ab1a56a054463c21f4"} Feb 02 22:55:15 crc kubenswrapper[4755]: I0202 22:55:15.064265 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wdj6p" event={"ID":"1f3d167f-44ce-4d19-a53c-b9d370837a3d","Type":"ContainerStarted","Data":"eacabd414151b6051e6ad6dec8e77721fa956b24657208b3128f6511dd7e14c7"} Feb 02 22:55:15 crc kubenswrapper[4755]: I0202 22:55:15.064311 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wdj6p" event={"ID":"1f3d167f-44ce-4d19-a53c-b9d370837a3d","Type":"ContainerStarted","Data":"1c0157bdd5b562f21197bbdcd91b643ae42110f663dd93d8ee67f3ccbfb0f3fc"} Feb 02 22:55:15 
Feb 02 22:55:15 crc kubenswrapper[4755]: I0202 22:55:15.066178 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-3aa0-account-create-update-5fhf8" event={"ID":"5b957bd8-a821-475c-9316-46a0e81487a8","Type":"ContainerStarted","Data":"2a58ff7ea96df1ef5d3125c206fadc47431b673998f7f23ce7f6c519928b6504"}
Feb 02 22:55:15 crc kubenswrapper[4755]: I0202 22:55:15.066217 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-3aa0-account-create-update-5fhf8" event={"ID":"5b957bd8-a821-475c-9316-46a0e81487a8","Type":"ContainerStarted","Data":"ef8c139f7b613e55f8028d1483348a23da900caeb1c0f9462508ee4029b91de3"}
Feb 02 22:55:15 crc kubenswrapper[4755]: I0202 22:55:15.082705 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-zzz84" podStartSLOduration=2.082685016 podStartE2EDuration="2.082685016s" podCreationTimestamp="2026-02-02 22:55:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:55:15.076615276 +0000 UTC m=+1270.767835602" watchObservedRunningTime="2026-02-02 22:55:15.082685016 +0000 UTC m=+1270.773905342"
Feb 02 22:55:15 crc kubenswrapper[4755]: I0202 22:55:15.086250 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-jwwfl" event={"ID":"29a63582-71ab-4de9-a5ab-ca97b11b0a73","Type":"ContainerStarted","Data":"25751954eea5109293ea8bcedc5cab316b0a1cdaf0f5b1e357e5e3dd3796d9ee"}
Feb 02 22:55:15 crc kubenswrapper[4755]: I0202 22:55:15.086305 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-jwwfl" event={"ID":"29a63582-71ab-4de9-a5ab-ca97b11b0a73","Type":"ContainerStarted","Data":"bb189b608a4f0567622b6ce0eb1c8c7a06800a1d3a15a5e9877314295ebc405a"}
Feb 02 22:55:15 crc kubenswrapper[4755]: I0202 22:55:15.086321 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-6bd1-account-create-update-7s7bt" event={"ID":"3fcb6639-8e5c-41c3-8b9c-ce398dffaba8","Type":"ContainerStarted","Data":"34ef74b8a9e586cb0ca95b126934071193f5b9a8aac551a0622699b5f0760bab"}
Feb 02 22:55:15 crc kubenswrapper[4755]: I0202 22:55:15.086332 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-6bd1-account-create-update-7s7bt" event={"ID":"3fcb6639-8e5c-41c3-8b9c-ce398dffaba8","Type":"ContainerStarted","Data":"7c8f3988a984c1643975971341819064cb8853d0e63a10a8d1025d615d8d9635"}
Feb 02 22:55:15 crc kubenswrapper[4755]: I0202 22:55:15.100311 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-3aa0-account-create-update-5fhf8" podStartSLOduration=2.100294681 podStartE2EDuration="2.100294681s" podCreationTimestamp="2026-02-02 22:55:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:55:15.096863874 +0000 UTC m=+1270.788084200" watchObservedRunningTime="2026-02-02 22:55:15.100294681 +0000 UTC m=+1270.791515007"
Feb 02 22:55:15 crc kubenswrapper[4755]: I0202 22:55:15.119849 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-wdj6p" podStartSLOduration=3.119832589 podStartE2EDuration="3.119832589s" podCreationTimestamp="2026-02-02 22:55:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:55:15.11381695 +0000 UTC m=+1270.805037276" watchObservedRunningTime="2026-02-02 22:55:15.119832589 +0000 UTC m=+1270.811052915"
Feb 02 22:55:15 crc kubenswrapper[4755]: I0202 22:55:15.135980 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-f9e8-account-create-update-p7swg" podStartSLOduration=2.135965502 podStartE2EDuration="2.135965502s" podCreationTimestamp="2026-02-02 22:55:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:55:15.133980676 +0000 UTC m=+1270.825201002" watchObservedRunningTime="2026-02-02 22:55:15.135965502 +0000 UTC m=+1270.827185828"
Feb 02 22:55:15 crc kubenswrapper[4755]: I0202 22:55:15.162869 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-6bd1-account-create-update-7s7bt" podStartSLOduration=2.162849996 podStartE2EDuration="2.162849996s" podCreationTimestamp="2026-02-02 22:55:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:55:15.156246271 +0000 UTC m=+1270.847466607" watchObservedRunningTime="2026-02-02 22:55:15.162849996 +0000 UTC m=+1270.854070322"
Feb 02 22:55:15 crc kubenswrapper[4755]: I0202 22:55:15.171860 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-jwwfl" podStartSLOduration=2.171848729 podStartE2EDuration="2.171848729s" podCreationTimestamp="2026-02-02 22:55:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:55:15.168019901 +0000 UTC m=+1270.859240227" watchObservedRunningTime="2026-02-02 22:55:15.171848729 +0000 UTC m=+1270.863069055"
Feb 02 22:55:15 crc kubenswrapper[4755]: I0202 22:55:15.343017 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Feb 02 22:55:15 crc kubenswrapper[4755]: I0202 22:55:15.343335 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1caed79b-dddf-4068-9f4a-0d014562bfce" containerName="ceilometer-central-agent" containerID="cri-o://c4e17f2e14c2186a42ab1df0b4ccc290c3352b93039e1ff0b3b80deaf033cd5a" gracePeriod=30
Feb 02 22:55:15 crc kubenswrapper[4755]: I0202 22:55:15.343709 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1caed79b-dddf-4068-9f4a-0d014562bfce" containerName="proxy-httpd" containerID="cri-o://a4652644e5fae04947743f9679b0b9c225c3560380a54bfdd09ac7accfcc8400" gracePeriod=30
Feb 02 22:55:15 crc kubenswrapper[4755]: I0202 22:55:15.344262 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1caed79b-dddf-4068-9f4a-0d014562bfce" containerName="ceilometer-notification-agent" containerID="cri-o://ede5a715cedb0c5feaf407b82c82c0caf666ea38062ad8ff86d9eeb806f19380" gracePeriod=30
Feb 02 22:55:15 crc kubenswrapper[4755]: I0202 22:55:15.344416 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1caed79b-dddf-4068-9f4a-0d014562bfce" containerName="sg-core" containerID="cri-o://01fd900786e7ab6ac8122643d61f6bafd948485ea789eada040bc3f3c1ac20e8" gracePeriod=30
Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.101564 4755 generic.go:334] "Generic (PLEG): container finished" podID="1f3d167f-44ce-4d19-a53c-b9d370837a3d" containerID="eacabd414151b6051e6ad6dec8e77721fa956b24657208b3128f6511dd7e14c7" exitCode=0
Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.101872 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wdj6p" event={"ID":"1f3d167f-44ce-4d19-a53c-b9d370837a3d","Type":"ContainerDied","Data":"eacabd414151b6051e6ad6dec8e77721fa956b24657208b3128f6511dd7e14c7"}
Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.108513 4755 generic.go:334] "Generic (PLEG): container finished" podID="5b957bd8-a821-475c-9316-46a0e81487a8" containerID="2a58ff7ea96df1ef5d3125c206fadc47431b673998f7f23ce7f6c519928b6504" exitCode=0
Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.108569 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-3aa0-account-create-update-5fhf8" event={"ID":"5b957bd8-a821-475c-9316-46a0e81487a8","Type":"ContainerDied","Data":"2a58ff7ea96df1ef5d3125c206fadc47431b673998f7f23ce7f6c519928b6504"}
Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.114875 4755 generic.go:334] "Generic (PLEG): container finished" podID="29a63582-71ab-4de9-a5ab-ca97b11b0a73" containerID="25751954eea5109293ea8bcedc5cab316b0a1cdaf0f5b1e357e5e3dd3796d9ee" exitCode=0
Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.115005 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-jwwfl" event={"ID":"29a63582-71ab-4de9-a5ab-ca97b11b0a73","Type":"ContainerDied","Data":"25751954eea5109293ea8bcedc5cab316b0a1cdaf0f5b1e357e5e3dd3796d9ee"}
Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.121306 4755 generic.go:334] "Generic (PLEG): container finished" podID="3fcb6639-8e5c-41c3-8b9c-ce398dffaba8" containerID="34ef74b8a9e586cb0ca95b126934071193f5b9a8aac551a0622699b5f0760bab" exitCode=0
Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.121381 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-6bd1-account-create-update-7s7bt" event={"ID":"3fcb6639-8e5c-41c3-8b9c-ce398dffaba8","Type":"ContainerDied","Data":"34ef74b8a9e586cb0ca95b126934071193f5b9a8aac551a0622699b5f0760bab"}
Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.124446 4755 generic.go:334] "Generic (PLEG): container finished" podID="9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8" containerID="a478a38b0d155268bdef66870aa039cd21d67f1464e4b71b70065ff6e0edbea3" exitCode=0
Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.124549 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-zzz84" event={"ID":"9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8","Type":"ContainerDied","Data":"a478a38b0d155268bdef66870aa039cd21d67f1464e4b71b70065ff6e0edbea3"}
Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.126589 4755 generic.go:334] "Generic (PLEG): container finished" podID="c0cbc632-66aa-4301-952c-a59fcbd3e884" containerID="96b3b4f82e7155a2511283cdad4ad6b98845ccd5cd92ea505830eadbb353ec72" exitCode=0
Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.126641 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-f9e8-account-create-update-p7swg" event={"ID":"c0cbc632-66aa-4301-952c-a59fcbd3e884","Type":"ContainerDied","Data":"96b3b4f82e7155a2511283cdad4ad6b98845ccd5cd92ea505830eadbb353ec72"}
Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.140450 4755 generic.go:334] "Generic (PLEG): container finished" podID="1caed79b-dddf-4068-9f4a-0d014562bfce" containerID="a4652644e5fae04947743f9679b0b9c225c3560380a54bfdd09ac7accfcc8400" exitCode=0
Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.140714 4755 generic.go:334] "Generic (PLEG): container finished" podID="1caed79b-dddf-4068-9f4a-0d014562bfce" containerID="01fd900786e7ab6ac8122643d61f6bafd948485ea789eada040bc3f3c1ac20e8" exitCode=2
Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.140810 4755 generic.go:334] "Generic (PLEG): container finished" podID="1caed79b-dddf-4068-9f4a-0d014562bfce" containerID="ede5a715cedb0c5feaf407b82c82c0caf666ea38062ad8ff86d9eeb806f19380" exitCode=0
Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.140818 4755 generic.go:334] "Generic (PLEG): container finished" podID="1caed79b-dddf-4068-9f4a-0d014562bfce" containerID="c4e17f2e14c2186a42ab1df0b4ccc290c3352b93039e1ff0b3b80deaf033cd5a" exitCode=0
Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.140842 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1caed79b-dddf-4068-9f4a-0d014562bfce","Type":"ContainerDied","Data":"a4652644e5fae04947743f9679b0b9c225c3560380a54bfdd09ac7accfcc8400"}
Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.140869 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1caed79b-dddf-4068-9f4a-0d014562bfce","Type":"ContainerDied","Data":"01fd900786e7ab6ac8122643d61f6bafd948485ea789eada040bc3f3c1ac20e8"}
Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.140879 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1caed79b-dddf-4068-9f4a-0d014562bfce","Type":"ContainerDied","Data":"ede5a715cedb0c5feaf407b82c82c0caf666ea38062ad8ff86d9eeb806f19380"}
Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.140888 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1caed79b-dddf-4068-9f4a-0d014562bfce","Type":"ContainerDied","Data":"c4e17f2e14c2186a42ab1df0b4ccc290c3352b93039e1ff0b3b80deaf033cd5a"}
Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.192863 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.358593 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-scripts\") pod \"1caed79b-dddf-4068-9f4a-0d014562bfce\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.358680 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-sg-core-conf-yaml\") pod \"1caed79b-dddf-4068-9f4a-0d014562bfce\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.358879 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5w9b\" (UniqueName: \"kubernetes.io/projected/1caed79b-dddf-4068-9f4a-0d014562bfce-kube-api-access-k5w9b\") pod \"1caed79b-dddf-4068-9f4a-0d014562bfce\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.358962 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1caed79b-dddf-4068-9f4a-0d014562bfce-log-httpd\") pod \"1caed79b-dddf-4068-9f4a-0d014562bfce\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.359094 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1caed79b-dddf-4068-9f4a-0d014562bfce-run-httpd\") pod \"1caed79b-dddf-4068-9f4a-0d014562bfce\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.359139 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-config-data\") pod \"1caed79b-dddf-4068-9f4a-0d014562bfce\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.359349 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1caed79b-dddf-4068-9f4a-0d014562bfce-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "1caed79b-dddf-4068-9f4a-0d014562bfce" (UID: "1caed79b-dddf-4068-9f4a-0d014562bfce"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.359368 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1caed79b-dddf-4068-9f4a-0d014562bfce-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "1caed79b-dddf-4068-9f4a-0d014562bfce" (UID: "1caed79b-dddf-4068-9f4a-0d014562bfce"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.359391 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-combined-ca-bundle\") pod \"1caed79b-dddf-4068-9f4a-0d014562bfce\" (UID: \"1caed79b-dddf-4068-9f4a-0d014562bfce\") " Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.360251 4755 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1caed79b-dddf-4068-9f4a-0d014562bfce-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.360281 4755 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1caed79b-dddf-4068-9f4a-0d014562bfce-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.365088 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1caed79b-dddf-4068-9f4a-0d014562bfce-kube-api-access-k5w9b" (OuterVolumeSpecName: "kube-api-access-k5w9b") pod "1caed79b-dddf-4068-9f4a-0d014562bfce" (UID: "1caed79b-dddf-4068-9f4a-0d014562bfce"). InnerVolumeSpecName "kube-api-access-k5w9b". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.374966 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-scripts" (OuterVolumeSpecName: "scripts") pod "1caed79b-dddf-4068-9f4a-0d014562bfce" (UID: "1caed79b-dddf-4068-9f4a-0d014562bfce"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.396364 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "1caed79b-dddf-4068-9f4a-0d014562bfce" (UID: "1caed79b-dddf-4068-9f4a-0d014562bfce"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.456609 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1caed79b-dddf-4068-9f4a-0d014562bfce" (UID: "1caed79b-dddf-4068-9f4a-0d014562bfce"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.463935 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.463979 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.463992 4755 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.464002 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5w9b\" (UniqueName: \"kubernetes.io/projected/1caed79b-dddf-4068-9f4a-0d014562bfce-kube-api-access-k5w9b\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.478780 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-config-data" (OuterVolumeSpecName: "config-data") pod "1caed79b-dddf-4068-9f4a-0d014562bfce" (UID: "1caed79b-dddf-4068-9f4a-0d014562bfce"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.565434 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1caed79b-dddf-4068-9f4a-0d014562bfce-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.612913 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.613195 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="5ee3fd38-71d5-429f-87d5-1c3556ddff55" containerName="glance-log" containerID="cri-o://f0496aa247df17c0c4a844b37679945755b95ce5a19a813c19f9f84bfb3d4c29" gracePeriod=30 Feb 02 22:55:16 crc kubenswrapper[4755]: I0202 22:55:16.613714 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="5ee3fd38-71d5-429f-87d5-1c3556ddff55" containerName="glance-httpd" containerID="cri-o://9e76d4b8d4fd58f8070c673771c4dab54e8d08ed55c687d8b000cd092756dcb6" gracePeriod=30 Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.151880 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1caed79b-dddf-4068-9f4a-0d014562bfce","Type":"ContainerDied","Data":"5dc6a8eddf38752ce502b64cbc49a6ea40867f59ecaac2389229dc25843089e1"} Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.152141 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.152154 4755 scope.go:117] "RemoveContainer" containerID="a4652644e5fae04947743f9679b0b9c225c3560380a54bfdd09ac7accfcc8400" Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.155701 4755 generic.go:334] "Generic (PLEG): container finished" podID="5ee3fd38-71d5-429f-87d5-1c3556ddff55" containerID="f0496aa247df17c0c4a844b37679945755b95ce5a19a813c19f9f84bfb3d4c29" exitCode=143 Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.155817 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5ee3fd38-71d5-429f-87d5-1c3556ddff55","Type":"ContainerDied","Data":"f0496aa247df17c0c4a844b37679945755b95ce5a19a813c19f9f84bfb3d4c29"} Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.183483 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.208311 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.221857 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:55:17 crc kubenswrapper[4755]: E0202 22:55:17.222258 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1caed79b-dddf-4068-9f4a-0d014562bfce" containerName="proxy-httpd" Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.222273 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="1caed79b-dddf-4068-9f4a-0d014562bfce" containerName="proxy-httpd" Feb 02 22:55:17 crc kubenswrapper[4755]: E0202 22:55:17.222284 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1caed79b-dddf-4068-9f4a-0d014562bfce" containerName="ceilometer-central-agent" Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.222291 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="1caed79b-dddf-4068-9f4a-0d014562bfce" containerName="ceilometer-central-agent" Feb 02 22:55:17 crc kubenswrapper[4755]: E0202 22:55:17.222301 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1caed79b-dddf-4068-9f4a-0d014562bfce" containerName="sg-core" Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.222307 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="1caed79b-dddf-4068-9f4a-0d014562bfce" containerName="sg-core" Feb 02 22:55:17 crc kubenswrapper[4755]: E0202 22:55:17.222320 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1caed79b-dddf-4068-9f4a-0d014562bfce" containerName="ceilometer-notification-agent" Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.222326 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="1caed79b-dddf-4068-9f4a-0d014562bfce" containerName="ceilometer-notification-agent" Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.222502 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="1caed79b-dddf-4068-9f4a-0d014562bfce" containerName="sg-core" Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.222517 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="1caed79b-dddf-4068-9f4a-0d014562bfce" containerName="ceilometer-notification-agent" Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.222525 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="1caed79b-dddf-4068-9f4a-0d014562bfce" containerName="ceilometer-central-agent" Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.222545 4755 
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.224277 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.227871 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.228061 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.243278 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.256164 4755 scope.go:117] "RemoveContainer" containerID="01fd900786e7ab6ac8122643d61f6bafd948485ea789eada040bc3f3c1ac20e8"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.331716 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Feb 02 22:55:17 crc kubenswrapper[4755]: E0202 22:55:17.332530 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle config-data kube-api-access-64q6c log-httpd run-httpd scripts sg-core-conf-yaml], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/ceilometer-0" podUID="6dcd8408-2bd4-4839-8e88-645b590d7f84"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.333865 4755 scope.go:117] "RemoveContainer" containerID="ede5a715cedb0c5feaf407b82c82c0caf666ea38062ad8ff86d9eeb806f19380"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.381649 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " pod="openstack/ceilometer-0"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.381695 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6dcd8408-2bd4-4839-8e88-645b590d7f84-log-httpd\") pod \"ceilometer-0\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " pod="openstack/ceilometer-0"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.381780 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-scripts\") pod \"ceilometer-0\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " pod="openstack/ceilometer-0"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.381797 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " pod="openstack/ceilometer-0"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.381871 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64q6c\" (UniqueName: \"kubernetes.io/projected/6dcd8408-2bd4-4839-8e88-645b590d7f84-kube-api-access-64q6c\") pod \"ceilometer-0\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " pod="openstack/ceilometer-0"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.381890 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6dcd8408-2bd4-4839-8e88-645b590d7f84-run-httpd\") pod \"ceilometer-0\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " pod="openstack/ceilometer-0"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.381965 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-config-data\") pod \"ceilometer-0\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " pod="openstack/ceilometer-0"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.455374 4755 scope.go:117] "RemoveContainer" containerID="c4e17f2e14c2186a42ab1df0b4ccc290c3352b93039e1ff0b3b80deaf033cd5a"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.486860 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64q6c\" (UniqueName: \"kubernetes.io/projected/6dcd8408-2bd4-4839-8e88-645b590d7f84-kube-api-access-64q6c\") pod \"ceilometer-0\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " pod="openstack/ceilometer-0"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.486908 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6dcd8408-2bd4-4839-8e88-645b590d7f84-run-httpd\") pod \"ceilometer-0\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " pod="openstack/ceilometer-0"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.486978 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-config-data\") pod \"ceilometer-0\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " pod="openstack/ceilometer-0"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.487015 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " pod="openstack/ceilometer-0"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.487039 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6dcd8408-2bd4-4839-8e88-645b590d7f84-log-httpd\") pod \"ceilometer-0\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " pod="openstack/ceilometer-0"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.487076 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-scripts\") pod \"ceilometer-0\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " pod="openstack/ceilometer-0"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.487091 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " pod="openstack/ceilometer-0"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.489300 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6dcd8408-2bd4-4839-8e88-645b590d7f84-log-httpd\") pod \"ceilometer-0\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " pod="openstack/ceilometer-0"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.495149 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6dcd8408-2bd4-4839-8e88-645b590d7f84-run-httpd\") pod \"ceilometer-0\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " pod="openstack/ceilometer-0"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.496760 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-config-data\") pod \"ceilometer-0\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " pod="openstack/ceilometer-0"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.498480 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " pod="openstack/ceilometer-0"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.504512 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " pod="openstack/ceilometer-0"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.526394 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-scripts\") pod \"ceilometer-0\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " pod="openstack/ceilometer-0"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.566553 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64q6c\" (UniqueName: \"kubernetes.io/projected/6dcd8408-2bd4-4839-8e88-645b590d7f84-kube-api-access-64q6c\") pod \"ceilometer-0\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " pod="openstack/ceilometer-0"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.875136 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-jwwfl"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.964579 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-f9e8-account-create-update-p7swg"
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.998038 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29a63582-71ab-4de9-a5ab-ca97b11b0a73-operator-scripts\") pod \"29a63582-71ab-4de9-a5ab-ca97b11b0a73\" (UID: \"29a63582-71ab-4de9-a5ab-ca97b11b0a73\") "
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.998433 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6gcnd\" (UniqueName: \"kubernetes.io/projected/29a63582-71ab-4de9-a5ab-ca97b11b0a73-kube-api-access-6gcnd\") pod \"29a63582-71ab-4de9-a5ab-ca97b11b0a73\" (UID: \"29a63582-71ab-4de9-a5ab-ca97b11b0a73\") "
Feb 02 22:55:17 crc kubenswrapper[4755]: I0202 22:55:17.998915 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29a63582-71ab-4de9-a5ab-ca97b11b0a73-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "29a63582-71ab-4de9-a5ab-ca97b11b0a73" (UID: "29a63582-71ab-4de9-a5ab-ca97b11b0a73"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.007445 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29a63582-71ab-4de9-a5ab-ca97b11b0a73-kube-api-access-6gcnd" (OuterVolumeSpecName: "kube-api-access-6gcnd") pod "29a63582-71ab-4de9-a5ab-ca97b11b0a73" (UID: "29a63582-71ab-4de9-a5ab-ca97b11b0a73"). InnerVolumeSpecName "kube-api-access-6gcnd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.009907 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-6bd1-account-create-update-7s7bt"
Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.015600 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-3aa0-account-create-update-5fhf8"
Need to start a new one" pod="openstack/nova-cell0-3aa0-account-create-update-5fhf8" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.100639 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3fcb6639-8e5c-41c3-8b9c-ce398dffaba8-operator-scripts\") pod \"3fcb6639-8e5c-41c3-8b9c-ce398dffaba8\" (UID: \"3fcb6639-8e5c-41c3-8b9c-ce398dffaba8\") " Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.101397 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2pjf8\" (UniqueName: \"kubernetes.io/projected/c0cbc632-66aa-4301-952c-a59fcbd3e884-kube-api-access-2pjf8\") pod \"c0cbc632-66aa-4301-952c-a59fcbd3e884\" (UID: \"c0cbc632-66aa-4301-952c-a59fcbd3e884\") " Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.101466 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c0cbc632-66aa-4301-952c-a59fcbd3e884-operator-scripts\") pod \"c0cbc632-66aa-4301-952c-a59fcbd3e884\" (UID: \"c0cbc632-66aa-4301-952c-a59fcbd3e884\") " Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.101513 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b957bd8-a821-475c-9316-46a0e81487a8-operator-scripts\") pod \"5b957bd8-a821-475c-9316-46a0e81487a8\" (UID: \"5b957bd8-a821-475c-9316-46a0e81487a8\") " Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.101576 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmw7l\" (UniqueName: \"kubernetes.io/projected/3fcb6639-8e5c-41c3-8b9c-ce398dffaba8-kube-api-access-nmw7l\") pod \"3fcb6639-8e5c-41c3-8b9c-ce398dffaba8\" (UID: \"3fcb6639-8e5c-41c3-8b9c-ce398dffaba8\") " Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.101654 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vs4hp\" (UniqueName: \"kubernetes.io/projected/5b957bd8-a821-475c-9316-46a0e81487a8-kube-api-access-vs4hp\") pod \"5b957bd8-a821-475c-9316-46a0e81487a8\" (UID: \"5b957bd8-a821-475c-9316-46a0e81487a8\") " Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.102469 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0cbc632-66aa-4301-952c-a59fcbd3e884-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c0cbc632-66aa-4301-952c-a59fcbd3e884" (UID: "c0cbc632-66aa-4301-952c-a59fcbd3e884"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.102525 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29a63582-71ab-4de9-a5ab-ca97b11b0a73-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.102562 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6gcnd\" (UniqueName: \"kubernetes.io/projected/29a63582-71ab-4de9-a5ab-ca97b11b0a73-kube-api-access-6gcnd\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.102944 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3fcb6639-8e5c-41c3-8b9c-ce398dffaba8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3fcb6639-8e5c-41c3-8b9c-ce398dffaba8" (UID: "3fcb6639-8e5c-41c3-8b9c-ce398dffaba8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.103116 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b957bd8-a821-475c-9316-46a0e81487a8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5b957bd8-a821-475c-9316-46a0e81487a8" (UID: "5b957bd8-a821-475c-9316-46a0e81487a8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.115895 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b957bd8-a821-475c-9316-46a0e81487a8-kube-api-access-vs4hp" (OuterVolumeSpecName: "kube-api-access-vs4hp") pod "5b957bd8-a821-475c-9316-46a0e81487a8" (UID: "5b957bd8-a821-475c-9316-46a0e81487a8"). InnerVolumeSpecName "kube-api-access-vs4hp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.121457 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fcb6639-8e5c-41c3-8b9c-ce398dffaba8-kube-api-access-nmw7l" (OuterVolumeSpecName: "kube-api-access-nmw7l") pod "3fcb6639-8e5c-41c3-8b9c-ce398dffaba8" (UID: "3fcb6639-8e5c-41c3-8b9c-ce398dffaba8"). InnerVolumeSpecName "kube-api-access-nmw7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.122286 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0cbc632-66aa-4301-952c-a59fcbd3e884-kube-api-access-2pjf8" (OuterVolumeSpecName: "kube-api-access-2pjf8") pod "c0cbc632-66aa-4301-952c-a59fcbd3e884" (UID: "c0cbc632-66aa-4301-952c-a59fcbd3e884"). InnerVolumeSpecName "kube-api-access-2pjf8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.209654 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmw7l\" (UniqueName: \"kubernetes.io/projected/3fcb6639-8e5c-41c3-8b9c-ce398dffaba8-kube-api-access-nmw7l\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.209690 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vs4hp\" (UniqueName: \"kubernetes.io/projected/5b957bd8-a821-475c-9316-46a0e81487a8-kube-api-access-vs4hp\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.209701 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3fcb6639-8e5c-41c3-8b9c-ce398dffaba8-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.209711 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2pjf8\" (UniqueName: \"kubernetes.io/projected/c0cbc632-66aa-4301-952c-a59fcbd3e884-kube-api-access-2pjf8\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.209720 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c0cbc632-66aa-4301-952c-a59fcbd3e884-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.209752 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b957bd8-a821-475c-9316-46a0e81487a8-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.219263 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-f9e8-account-create-update-p7swg" event={"ID":"c0cbc632-66aa-4301-952c-a59fcbd3e884","Type":"ContainerDied","Data":"71a510e50cf19896e9127e53cdc5858d288f874885de37ab1a56a054463c21f4"} Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.219324 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71a510e50cf19896e9127e53cdc5858d288f874885de37ab1a56a054463c21f4" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.219468 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-f9e8-account-create-update-p7swg" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.223790 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.224031 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ae1efe99-ce8f-4ef7-b641-dd666e3864ea" containerName="glance-log" containerID="cri-o://13b4cf9281394bca2abce68d8d4b38d38861e7bb3d2fcf5e62c099e9222a8edb" gracePeriod=30 Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.224481 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ae1efe99-ce8f-4ef7-b641-dd666e3864ea" containerName="glance-httpd" containerID="cri-o://cb79db437a03767f0cf2475ba07a9bacb7eaf38cb3d8a087bbd98b846a7a36ef" gracePeriod=30 Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.236524 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-3aa0-account-create-update-5fhf8" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.236861 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-3aa0-account-create-update-5fhf8" event={"ID":"5b957bd8-a821-475c-9316-46a0e81487a8","Type":"ContainerDied","Data":"ef8c139f7b613e55f8028d1483348a23da900caeb1c0f9462508ee4029b91de3"} Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.236906 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef8c139f7b613e55f8028d1483348a23da900caeb1c0f9462508ee4029b91de3" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.239648 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-jwwfl" event={"ID":"29a63582-71ab-4de9-a5ab-ca97b11b0a73","Type":"ContainerDied","Data":"bb189b608a4f0567622b6ce0eb1c8c7a06800a1d3a15a5e9877314295ebc405a"} Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.239686 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb189b608a4f0567622b6ce0eb1c8c7a06800a1d3a15a5e9877314295ebc405a" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.240178 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-jwwfl" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.251565 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.252952 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-6bd1-account-create-update-7s7bt" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.253283 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-6bd1-account-create-update-7s7bt" event={"ID":"3fcb6639-8e5c-41c3-8b9c-ce398dffaba8","Type":"ContainerDied","Data":"7c8f3988a984c1643975971341819064cb8853d0e63a10a8d1025d615d8d9635"} Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.253318 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c8f3988a984c1643975971341819064cb8853d0e63a10a8d1025d615d8d9635" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.263069 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-wdj6p" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.334250 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-zzz84" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.349771 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.413828 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f3d167f-44ce-4d19-a53c-b9d370837a3d-operator-scripts\") pod \"1f3d167f-44ce-4d19-a53c-b9d370837a3d\" (UID: \"1f3d167f-44ce-4d19-a53c-b9d370837a3d\") " Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.414090 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6dcd8408-2bd4-4839-8e88-645b590d7f84-log-httpd\") pod \"6dcd8408-2bd4-4839-8e88-645b590d7f84\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.414173 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-srxp6\" (UniqueName: \"kubernetes.io/projected/1f3d167f-44ce-4d19-a53c-b9d370837a3d-kube-api-access-srxp6\") pod \"1f3d167f-44ce-4d19-a53c-b9d370837a3d\" (UID: \"1f3d167f-44ce-4d19-a53c-b9d370837a3d\") " Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.414243 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64q6c\" (UniqueName: \"kubernetes.io/projected/6dcd8408-2bd4-4839-8e88-645b590d7f84-kube-api-access-64q6c\") pod \"6dcd8408-2bd4-4839-8e88-645b590d7f84\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.414319 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8-operator-scripts\") pod \"9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8\" (UID: \"9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8\") " Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.414406 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-sg-core-conf-yaml\") pod \"6dcd8408-2bd4-4839-8e88-645b590d7f84\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.414483 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-combined-ca-bundle\") pod \"6dcd8408-2bd4-4839-8e88-645b590d7f84\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.414351 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f3d167f-44ce-4d19-a53c-b9d370837a3d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1f3d167f-44ce-4d19-a53c-b9d370837a3d" (UID: "1f3d167f-44ce-4d19-a53c-b9d370837a3d"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.414756 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-config-data\") pod \"6dcd8408-2bd4-4839-8e88-645b590d7f84\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.414840 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6dcd8408-2bd4-4839-8e88-645b590d7f84-run-httpd\") pod \"6dcd8408-2bd4-4839-8e88-645b590d7f84\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.414924 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4gmkt\" (UniqueName: \"kubernetes.io/projected/9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8-kube-api-access-4gmkt\") pod \"9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8\" (UID: \"9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8\") " Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.415359 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f3d167f-44ce-4d19-a53c-b9d370837a3d-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.414881 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8" (UID: "9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.415265 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6dcd8408-2bd4-4839-8e88-645b590d7f84-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "6dcd8408-2bd4-4839-8e88-645b590d7f84" (UID: "6dcd8408-2bd4-4839-8e88-645b590d7f84"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.415758 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6dcd8408-2bd4-4839-8e88-645b590d7f84-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "6dcd8408-2bd4-4839-8e88-645b590d7f84" (UID: "6dcd8408-2bd4-4839-8e88-645b590d7f84"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.417135 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f3d167f-44ce-4d19-a53c-b9d370837a3d-kube-api-access-srxp6" (OuterVolumeSpecName: "kube-api-access-srxp6") pod "1f3d167f-44ce-4d19-a53c-b9d370837a3d" (UID: "1f3d167f-44ce-4d19-a53c-b9d370837a3d"). InnerVolumeSpecName "kube-api-access-srxp6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.419846 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-config-data" (OuterVolumeSpecName: "config-data") pod "6dcd8408-2bd4-4839-8e88-645b590d7f84" (UID: "6dcd8408-2bd4-4839-8e88-645b590d7f84"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.419856 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6dcd8408-2bd4-4839-8e88-645b590d7f84-kube-api-access-64q6c" (OuterVolumeSpecName: "kube-api-access-64q6c") pod "6dcd8408-2bd4-4839-8e88-645b590d7f84" (UID: "6dcd8408-2bd4-4839-8e88-645b590d7f84"). InnerVolumeSpecName "kube-api-access-64q6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.420425 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "6dcd8408-2bd4-4839-8e88-645b590d7f84" (UID: "6dcd8408-2bd4-4839-8e88-645b590d7f84"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.420425 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8-kube-api-access-4gmkt" (OuterVolumeSpecName: "kube-api-access-4gmkt") pod "9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8" (UID: "9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8"). InnerVolumeSpecName "kube-api-access-4gmkt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.421858 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6dcd8408-2bd4-4839-8e88-645b590d7f84" (UID: "6dcd8408-2bd4-4839-8e88-645b590d7f84"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.517004 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-scripts\") pod \"6dcd8408-2bd4-4839-8e88-645b590d7f84\" (UID: \"6dcd8408-2bd4-4839-8e88-645b590d7f84\") " Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.518188 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.518280 4755 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6dcd8408-2bd4-4839-8e88-645b590d7f84-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.518336 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4gmkt\" (UniqueName: \"kubernetes.io/projected/9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8-kube-api-access-4gmkt\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.518390 4755 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6dcd8408-2bd4-4839-8e88-645b590d7f84-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.518447 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-srxp6\" (UniqueName: \"kubernetes.io/projected/1f3d167f-44ce-4d19-a53c-b9d370837a3d-kube-api-access-srxp6\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.518501 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64q6c\" (UniqueName: \"kubernetes.io/projected/6dcd8408-2bd4-4839-8e88-645b590d7f84-kube-api-access-64q6c\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.518552 4755 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.518612 4755 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.518665 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.522843 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-scripts" (OuterVolumeSpecName: "scripts") pod "6dcd8408-2bd4-4839-8e88-645b590d7f84" (UID: "6dcd8408-2bd4-4839-8e88-645b590d7f84"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:18 crc kubenswrapper[4755]: I0202 22:55:18.620629 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6dcd8408-2bd4-4839-8e88-645b590d7f84-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.083689 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1caed79b-dddf-4068-9f4a-0d014562bfce" path="/var/lib/kubelet/pods/1caed79b-dddf-4068-9f4a-0d014562bfce/volumes" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.261404 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-wdj6p" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.261432 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wdj6p" event={"ID":"1f3d167f-44ce-4d19-a53c-b9d370837a3d","Type":"ContainerDied","Data":"1c0157bdd5b562f21197bbdcd91b643ae42110f663dd93d8ee67f3ccbfb0f3fc"} Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.262194 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c0157bdd5b562f21197bbdcd91b643ae42110f663dd93d8ee67f3ccbfb0f3fc" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.264947 4755 generic.go:334] "Generic (PLEG): container finished" podID="ae1efe99-ce8f-4ef7-b641-dd666e3864ea" containerID="13b4cf9281394bca2abce68d8d4b38d38861e7bb3d2fcf5e62c099e9222a8edb" exitCode=143 Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.265018 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ae1efe99-ce8f-4ef7-b641-dd666e3864ea","Type":"ContainerDied","Data":"13b4cf9281394bca2abce68d8d4b38d38861e7bb3d2fcf5e62c099e9222a8edb"} Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.267735 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.267759 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-zzz84" event={"ID":"9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8","Type":"ContainerDied","Data":"484e652a37f64c376dbe6656acd2a39590b4a1ad5f6060b084c0f807d8411590"} Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.267788 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="484e652a37f64c376dbe6656acd2a39590b4a1ad5f6060b084c0f807d8411590" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.267744 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-zzz84" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.318324 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.333464 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.353194 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:55:19 crc kubenswrapper[4755]: E0202 22:55:19.353625 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29a63582-71ab-4de9-a5ab-ca97b11b0a73" containerName="mariadb-database-create" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.353640 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="29a63582-71ab-4de9-a5ab-ca97b11b0a73" containerName="mariadb-database-create" Feb 02 22:55:19 crc kubenswrapper[4755]: E0202 22:55:19.353653 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b957bd8-a821-475c-9316-46a0e81487a8" containerName="mariadb-account-create-update" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.353658 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b957bd8-a821-475c-9316-46a0e81487a8" containerName="mariadb-account-create-update" Feb 02 22:55:19 crc kubenswrapper[4755]: E0202 22:55:19.353670 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f3d167f-44ce-4d19-a53c-b9d370837a3d" containerName="mariadb-database-create" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.353676 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f3d167f-44ce-4d19-a53c-b9d370837a3d" containerName="mariadb-database-create" Feb 02 22:55:19 crc kubenswrapper[4755]: E0202 22:55:19.353688 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fcb6639-8e5c-41c3-8b9c-ce398dffaba8" containerName="mariadb-account-create-update" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.353694 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fcb6639-8e5c-41c3-8b9c-ce398dffaba8" containerName="mariadb-account-create-update" Feb 02 22:55:19 crc kubenswrapper[4755]: E0202 22:55:19.353707 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0cbc632-66aa-4301-952c-a59fcbd3e884" containerName="mariadb-account-create-update" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.353712 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0cbc632-66aa-4301-952c-a59fcbd3e884" containerName="mariadb-account-create-update" Feb 02 22:55:19 crc kubenswrapper[4755]: E0202 22:55:19.353722 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8" containerName="mariadb-database-create" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.353742 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8" containerName="mariadb-database-create" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.353952 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fcb6639-8e5c-41c3-8b9c-ce398dffaba8" containerName="mariadb-account-create-update" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.353962 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0cbc632-66aa-4301-952c-a59fcbd3e884" containerName="mariadb-account-create-update" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.353973 4755 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8" containerName="mariadb-database-create" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.353983 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f3d167f-44ce-4d19-a53c-b9d370837a3d" containerName="mariadb-database-create" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.354007 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b957bd8-a821-475c-9316-46a0e81487a8" containerName="mariadb-account-create-update" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.354014 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="29a63582-71ab-4de9-a5ab-ca97b11b0a73" containerName="mariadb-database-create" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.356539 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.361382 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.363509 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.363740 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.436509 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/742a3e8c-e69e-4886-9ffd-8129c949ce2c-log-httpd\") pod \"ceilometer-0\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.436584 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-scripts\") pod \"ceilometer-0\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.436697 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zjmj\" (UniqueName: \"kubernetes.io/projected/742a3e8c-e69e-4886-9ffd-8129c949ce2c-kube-api-access-2zjmj\") pod \"ceilometer-0\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.436754 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/742a3e8c-e69e-4886-9ffd-8129c949ce2c-run-httpd\") pod \"ceilometer-0\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.436845 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.436867 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-config-data\") pod \"ceilometer-0\" 
(UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.436947 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.538863 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/742a3e8c-e69e-4886-9ffd-8129c949ce2c-run-httpd\") pod \"ceilometer-0\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.538924 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.538944 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-config-data\") pod \"ceilometer-0\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.539031 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.539062 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/742a3e8c-e69e-4886-9ffd-8129c949ce2c-log-httpd\") pod \"ceilometer-0\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.539098 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-scripts\") pod \"ceilometer-0\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.539149 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zjmj\" (UniqueName: \"kubernetes.io/projected/742a3e8c-e69e-4886-9ffd-8129c949ce2c-kube-api-access-2zjmj\") pod \"ceilometer-0\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.539331 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/742a3e8c-e69e-4886-9ffd-8129c949ce2c-run-httpd\") pod \"ceilometer-0\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.539788 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/742a3e8c-e69e-4886-9ffd-8129c949ce2c-log-httpd\") pod \"ceilometer-0\" (UID: 
\"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.543459 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-config-data\") pod \"ceilometer-0\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.551575 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.551634 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-scripts\") pod \"ceilometer-0\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.551638 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.556690 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zjmj\" (UniqueName: \"kubernetes.io/projected/742a3e8c-e69e-4886-9ffd-8129c949ce2c-kube-api-access-2zjmj\") pod \"ceilometer-0\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.672321 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:55:19 crc kubenswrapper[4755]: I0202 22:55:19.740939 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:55:20 crc kubenswrapper[4755]: W0202 22:55:20.197870 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod742a3e8c_e69e_4886_9ffd_8129c949ce2c.slice/crio-d865aba64696c737a9925ef6afdc637d8eeb0d482ea94ab37d2d74bc2256da8d WatchSource:0}: Error finding container d865aba64696c737a9925ef6afdc637d8eeb0d482ea94ab37d2d74bc2256da8d: Status 404 returned error can't find the container with id d865aba64696c737a9925ef6afdc637d8eeb0d482ea94ab37d2d74bc2256da8d Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.203907 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.280199 4755 generic.go:334] "Generic (PLEG): container finished" podID="5ee3fd38-71d5-429f-87d5-1c3556ddff55" containerID="9e76d4b8d4fd58f8070c673771c4dab54e8d08ed55c687d8b000cd092756dcb6" exitCode=0 Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.280255 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5ee3fd38-71d5-429f-87d5-1c3556ddff55","Type":"ContainerDied","Data":"9e76d4b8d4fd58f8070c673771c4dab54e8d08ed55c687d8b000cd092756dcb6"} Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.280284 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5ee3fd38-71d5-429f-87d5-1c3556ddff55","Type":"ContainerDied","Data":"374a12a2747820eec4d77bba37f5a06f4de437d712abae0f136be9bb4e1983c5"} Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.280296 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="374a12a2747820eec4d77bba37f5a06f4de437d712abae0f136be9bb4e1983c5" Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.280305 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.282212 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"742a3e8c-e69e-4886-9ffd-8129c949ce2c","Type":"ContainerStarted","Data":"d865aba64696c737a9925ef6afdc637d8eeb0d482ea94ab37d2d74bc2256da8d"} Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.356386 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-combined-ca-bundle\") pod \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.356458 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fn97b\" (UniqueName: \"kubernetes.io/projected/5ee3fd38-71d5-429f-87d5-1c3556ddff55-kube-api-access-fn97b\") pod \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.356532 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5ee3fd38-71d5-429f-87d5-1c3556ddff55-httpd-run\") pod \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.356666 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") pod \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.357224 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ee3fd38-71d5-429f-87d5-1c3556ddff55-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5ee3fd38-71d5-429f-87d5-1c3556ddff55" (UID: "5ee3fd38-71d5-429f-87d5-1c3556ddff55"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.357287 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-scripts\") pod \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.357340 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-internal-tls-certs\") pod \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.357411 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ee3fd38-71d5-429f-87d5-1c3556ddff55-logs\") pod \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.357705 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-config-data\") pod \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\" (UID: \"5ee3fd38-71d5-429f-87d5-1c3556ddff55\") " Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.357765 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ee3fd38-71d5-429f-87d5-1c3556ddff55-logs" (OuterVolumeSpecName: "logs") pod "5ee3fd38-71d5-429f-87d5-1c3556ddff55" (UID: "5ee3fd38-71d5-429f-87d5-1c3556ddff55"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.358247 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ee3fd38-71d5-429f-87d5-1c3556ddff55-logs\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.358261 4755 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5ee3fd38-71d5-429f-87d5-1c3556ddff55-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.364810 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ee3fd38-71d5-429f-87d5-1c3556ddff55-kube-api-access-fn97b" (OuterVolumeSpecName: "kube-api-access-fn97b") pod "5ee3fd38-71d5-429f-87d5-1c3556ddff55" (UID: "5ee3fd38-71d5-429f-87d5-1c3556ddff55"). InnerVolumeSpecName "kube-api-access-fn97b". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.365815 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-scripts" (OuterVolumeSpecName: "scripts") pod "5ee3fd38-71d5-429f-87d5-1c3556ddff55" (UID: "5ee3fd38-71d5-429f-87d5-1c3556ddff55"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.389099 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854" (OuterVolumeSpecName: "glance") pod "5ee3fd38-71d5-429f-87d5-1c3556ddff55" (UID: "5ee3fd38-71d5-429f-87d5-1c3556ddff55"). InnerVolumeSpecName "pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854". PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.413177 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5ee3fd38-71d5-429f-87d5-1c3556ddff55" (UID: "5ee3fd38-71d5-429f-87d5-1c3556ddff55"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.432265 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-config-data" (OuterVolumeSpecName: "config-data") pod "5ee3fd38-71d5-429f-87d5-1c3556ddff55" (UID: "5ee3fd38-71d5-429f-87d5-1c3556ddff55"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.437601 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "5ee3fd38-71d5-429f-87d5-1c3556ddff55" (UID: "5ee3fd38-71d5-429f-87d5-1c3556ddff55"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.459868 4755 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") on node \"crc\" " Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.459898 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.459909 4755 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.459921 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.459932 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ee3fd38-71d5-429f-87d5-1c3556ddff55-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.459940 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fn97b\" (UniqueName: \"kubernetes.io/projected/5ee3fd38-71d5-429f-87d5-1c3556ddff55-kube-api-access-fn97b\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.495093 4755 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.495275 4755 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854") on node "crc" Feb 02 22:55:20 crc kubenswrapper[4755]: I0202 22:55:20.567949 4755 reconciler_common.go:293] "Volume detached for volume \"pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.101804 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6dcd8408-2bd4-4839-8e88-645b590d7f84" path="/var/lib/kubelet/pods/6dcd8408-2bd4-4839-8e88-645b590d7f84/volumes" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.315381 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.316652 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"742a3e8c-e69e-4886-9ffd-8129c949ce2c","Type":"ContainerStarted","Data":"0d77e5108dd07d3fccb82dd584c95bd1487440beccaa56ba19f623638dd41fb1"} Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.368486 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.377909 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.388215 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 22:55:21 crc kubenswrapper[4755]: E0202 22:55:21.388622 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ee3fd38-71d5-429f-87d5-1c3556ddff55" containerName="glance-log" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.388640 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ee3fd38-71d5-429f-87d5-1c3556ddff55" containerName="glance-log" Feb 02 22:55:21 crc kubenswrapper[4755]: E0202 22:55:21.388653 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ee3fd38-71d5-429f-87d5-1c3556ddff55" containerName="glance-httpd" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.388659 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ee3fd38-71d5-429f-87d5-1c3556ddff55" containerName="glance-httpd" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.388875 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ee3fd38-71d5-429f-87d5-1c3556ddff55" containerName="glance-httpd" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.388896 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ee3fd38-71d5-429f-87d5-1c3556ddff55" containerName="glance-log" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.389978 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.404236 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.404425 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.407395 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.509050 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5c76c588-a947-484d-88e4-4fe526e1ffb4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.509106 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9p5jc\" (UniqueName: \"kubernetes.io/projected/5c76c588-a947-484d-88e4-4fe526e1ffb4-kube-api-access-9p5jc\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.509153 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.509224 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c76c588-a947-484d-88e4-4fe526e1ffb4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.509267 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c76c588-a947-484d-88e4-4fe526e1ffb4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.509327 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c76c588-a947-484d-88e4-4fe526e1ffb4-logs\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.509409 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c76c588-a947-484d-88e4-4fe526e1ffb4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.509437 4755 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c76c588-a947-484d-88e4-4fe526e1ffb4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.611411 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5c76c588-a947-484d-88e4-4fe526e1ffb4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.611458 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9p5jc\" (UniqueName: \"kubernetes.io/projected/5c76c588-a947-484d-88e4-4fe526e1ffb4-kube-api-access-9p5jc\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.611497 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.611548 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c76c588-a947-484d-88e4-4fe526e1ffb4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.611578 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c76c588-a947-484d-88e4-4fe526e1ffb4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.611613 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c76c588-a947-484d-88e4-4fe526e1ffb4-logs\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.611664 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c76c588-a947-484d-88e4-4fe526e1ffb4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.611684 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c76c588-a947-484d-88e4-4fe526e1ffb4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.612078 4755 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5c76c588-a947-484d-88e4-4fe526e1ffb4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.612317 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c76c588-a947-484d-88e4-4fe526e1ffb4-logs\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.617591 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c76c588-a947-484d-88e4-4fe526e1ffb4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.618907 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c76c588-a947-484d-88e4-4fe526e1ffb4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.619413 4755 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.619438 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/12ae2e05c1109de2cfa2799707fdeed95eed1c35304f00e5efa94d1e550db555/globalmount\"" pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.620272 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c76c588-a947-484d-88e4-4fe526e1ffb4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.620306 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c76c588-a947-484d-88e4-4fe526e1ffb4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.638062 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9p5jc\" (UniqueName: \"kubernetes.io/projected/5c76c588-a947-484d-88e4-4fe526e1ffb4-kube-api-access-9p5jc\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.711675 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1fa1cc63-77d5-419e-9a48-75f8920bd854\") pod \"glance-default-internal-api-0\" (UID: \"5c76c588-a947-484d-88e4-4fe526e1ffb4\") " pod="openstack/glance-default-internal-api-0" Feb 02 22:55:21 crc kubenswrapper[4755]: I0202 22:55:21.720371 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.285093 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.325703 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-combined-ca-bundle\") pod \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.325760 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-scripts\") pod \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.325812 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-public-tls-certs\") pod \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.325902 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-logs\") pod \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.325958 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-httpd-run\") pod \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.326038 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") pod \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.326058 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vjdlh\" (UniqueName: \"kubernetes.io/projected/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-kube-api-access-vjdlh\") pod \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.326105 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-config-data\") pod \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\" (UID: \"ae1efe99-ce8f-4ef7-b641-dd666e3864ea\") " Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.331319 4755 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-logs" (OuterVolumeSpecName: "logs") pod "ae1efe99-ce8f-4ef7-b641-dd666e3864ea" (UID: "ae1efe99-ce8f-4ef7-b641-dd666e3864ea"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.331483 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ae1efe99-ce8f-4ef7-b641-dd666e3864ea" (UID: "ae1efe99-ce8f-4ef7-b641-dd666e3864ea"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.341497 4755 generic.go:334] "Generic (PLEG): container finished" podID="ae1efe99-ce8f-4ef7-b641-dd666e3864ea" containerID="cb79db437a03767f0cf2475ba07a9bacb7eaf38cb3d8a087bbd98b846a7a36ef" exitCode=0 Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.341547 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ae1efe99-ce8f-4ef7-b641-dd666e3864ea","Type":"ContainerDied","Data":"cb79db437a03767f0cf2475ba07a9bacb7eaf38cb3d8a087bbd98b846a7a36ef"} Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.341577 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.341593 4755 scope.go:117] "RemoveContainer" containerID="cb79db437a03767f0cf2475ba07a9bacb7eaf38cb3d8a087bbd98b846a7a36ef" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.341579 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ae1efe99-ce8f-4ef7-b641-dd666e3864ea","Type":"ContainerDied","Data":"a5c100fc2653f744f1c1ba4222dd3ba008493963cd51157e4b296b411a6f5ca7"} Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.345969 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-kube-api-access-vjdlh" (OuterVolumeSpecName: "kube-api-access-vjdlh") pod "ae1efe99-ce8f-4ef7-b641-dd666e3864ea" (UID: "ae1efe99-ce8f-4ef7-b641-dd666e3864ea"). InnerVolumeSpecName "kube-api-access-vjdlh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.358466 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-scripts" (OuterVolumeSpecName: "scripts") pod "ae1efe99-ce8f-4ef7-b641-dd666e3864ea" (UID: "ae1efe99-ce8f-4ef7-b641-dd666e3864ea"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.371824 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484" (OuterVolumeSpecName: "glance") pod "ae1efe99-ce8f-4ef7-b641-dd666e3864ea" (UID: "ae1efe99-ce8f-4ef7-b641-dd666e3864ea"). InnerVolumeSpecName "pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.389339 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ae1efe99-ce8f-4ef7-b641-dd666e3864ea" (UID: "ae1efe99-ce8f-4ef7-b641-dd666e3864ea"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.416756 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ae1efe99-ce8f-4ef7-b641-dd666e3864ea" (UID: "ae1efe99-ce8f-4ef7-b641-dd666e3864ea"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.422519 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-config-data" (OuterVolumeSpecName: "config-data") pod "ae1efe99-ce8f-4ef7-b641-dd666e3864ea" (UID: "ae1efe99-ce8f-4ef7-b641-dd666e3864ea"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.428950 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-logs\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.429088 4755 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.429199 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vjdlh\" (UniqueName: \"kubernetes.io/projected/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-kube-api-access-vjdlh\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.429349 4755 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") on node \"crc\" " Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.429554 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.429667 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.429752 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.430546 4755 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae1efe99-ce8f-4ef7-b641-dd666e3864ea-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:22 crc 
Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.458372 4755 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.458502 4755 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484") on node "crc"
Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.532722 4755 reconciler_common.go:293] "Volume detached for volume \"pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") on node \"crc\" DevicePath \"\""
Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.726906 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.738930 4755 scope.go:117] "RemoveContainer" containerID="13b4cf9281394bca2abce68d8d4b38d38861e7bb3d2fcf5e62c099e9222a8edb"
Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.744514 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.759348 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Feb 02 22:55:22 crc kubenswrapper[4755]: E0202 22:55:22.759805 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae1efe99-ce8f-4ef7-b641-dd666e3864ea" containerName="glance-httpd"
Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.759826 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae1efe99-ce8f-4ef7-b641-dd666e3864ea" containerName="glance-httpd"
Feb 02 22:55:22 crc kubenswrapper[4755]: E0202 22:55:22.759841 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae1efe99-ce8f-4ef7-b641-dd666e3864ea" containerName="glance-log"
Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.759847 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae1efe99-ce8f-4ef7-b641-dd666e3864ea" containerName="glance-log"
Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.760047 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae1efe99-ce8f-4ef7-b641-dd666e3864ea" containerName="glance-httpd"
Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.760061 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae1efe99-ce8f-4ef7-b641-dd666e3864ea" containerName="glance-log"
Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.761051 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
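The csi_attacher line above is kubelet skipping the device unstage step because the kubevirt hostpath provisioner does not advertise the STAGE_UNSTAGE_VOLUME node capability, so UnmountDevice is recorded as succeeded without doing any work. A minimal sketch of that capability check, assuming the CSI spec's Go bindings and a caller-supplied node client (this is illustrative, not kubelet's actual code):

```go
package sketch

import (
	"context"

	"github.com/container-storage-interface/spec/lib/go/csi"
)

// nodeSupportsStageUnstage reports whether the CSI node plugin advertises
// STAGE_UNSTAGE_VOLUME. When it does not (as with this hostpath
// provisioner), callers can treat MountDevice/UnmountDevice as no-ops,
// which is exactly the "Skipping UnmountDevice..." message in the log.
func nodeSupportsStageUnstage(ctx context.Context, node csi.NodeClient) (bool, error) {
	resp, err := node.NodeGetCapabilities(ctx, &csi.NodeGetCapabilitiesRequest{})
	if err != nil {
		return false, err
	}
	for _, c := range resp.GetCapabilities() {
		if c.GetRpc().GetType() == csi.NodeServiceCapability_RPC_STAGE_UNSTAGE_VOLUME {
			return true, nil
		}
	}
	return false, nil
}
```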
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.766644 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.766846 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.774480 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.810127 4755 scope.go:117] "RemoveContainer" containerID="cb79db437a03767f0cf2475ba07a9bacb7eaf38cb3d8a087bbd98b846a7a36ef" Feb 02 22:55:22 crc kubenswrapper[4755]: E0202 22:55:22.814224 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb79db437a03767f0cf2475ba07a9bacb7eaf38cb3d8a087bbd98b846a7a36ef\": container with ID starting with cb79db437a03767f0cf2475ba07a9bacb7eaf38cb3d8a087bbd98b846a7a36ef not found: ID does not exist" containerID="cb79db437a03767f0cf2475ba07a9bacb7eaf38cb3d8a087bbd98b846a7a36ef" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.814280 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb79db437a03767f0cf2475ba07a9bacb7eaf38cb3d8a087bbd98b846a7a36ef"} err="failed to get container status \"cb79db437a03767f0cf2475ba07a9bacb7eaf38cb3d8a087bbd98b846a7a36ef\": rpc error: code = NotFound desc = could not find container \"cb79db437a03767f0cf2475ba07a9bacb7eaf38cb3d8a087bbd98b846a7a36ef\": container with ID starting with cb79db437a03767f0cf2475ba07a9bacb7eaf38cb3d8a087bbd98b846a7a36ef not found: ID does not exist" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.814313 4755 scope.go:117] "RemoveContainer" containerID="13b4cf9281394bca2abce68d8d4b38d38861e7bb3d2fcf5e62c099e9222a8edb" Feb 02 22:55:22 crc kubenswrapper[4755]: E0202 22:55:22.822811 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13b4cf9281394bca2abce68d8d4b38d38861e7bb3d2fcf5e62c099e9222a8edb\": container with ID starting with 13b4cf9281394bca2abce68d8d4b38d38861e7bb3d2fcf5e62c099e9222a8edb not found: ID does not exist" containerID="13b4cf9281394bca2abce68d8d4b38d38861e7bb3d2fcf5e62c099e9222a8edb" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.822860 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13b4cf9281394bca2abce68d8d4b38d38861e7bb3d2fcf5e62c099e9222a8edb"} err="failed to get container status \"13b4cf9281394bca2abce68d8d4b38d38861e7bb3d2fcf5e62c099e9222a8edb\": rpc error: code = NotFound desc = could not find container \"13b4cf9281394bca2abce68d8d4b38d38861e7bb3d2fcf5e62c099e9222a8edb\": container with ID starting with 13b4cf9281394bca2abce68d8d4b38d38861e7bb3d2fcf5e62c099e9222a8edb not found: ID does not exist" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.850969 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97769578-d73a-448d-a806-3296baae6447-config-data\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.851009 4755 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/97769578-d73a-448d-a806-3296baae6447-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.851043 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.851069 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97769578-d73a-448d-a806-3296baae6447-scripts\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.851092 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97769578-d73a-448d-a806-3296baae6447-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.851117 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97769578-d73a-448d-a806-3296baae6447-logs\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.851141 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/97769578-d73a-448d-a806-3296baae6447-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.851192 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8h9s\" (UniqueName: \"kubernetes.io/projected/97769578-d73a-448d-a806-3296baae6447-kube-api-access-g8h9s\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.955371 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/97769578-d73a-448d-a806-3296baae6447-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.955920 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8h9s\" (UniqueName: \"kubernetes.io/projected/97769578-d73a-448d-a806-3296baae6447-kube-api-access-g8h9s\") pod \"glance-default-external-api-0\" (UID: 
\"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.957251 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97769578-d73a-448d-a806-3296baae6447-config-data\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.957301 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/97769578-d73a-448d-a806-3296baae6447-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.957349 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.957375 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97769578-d73a-448d-a806-3296baae6447-scripts\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.957405 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97769578-d73a-448d-a806-3296baae6447-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.957452 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97769578-d73a-448d-a806-3296baae6447-logs\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.958030 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97769578-d73a-448d-a806-3296baae6447-logs\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.958377 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/97769578-d73a-448d-a806-3296baae6447-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.969020 4755 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.969063 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/36a0f8b995f8d37a8776a91ffef55e84b7ae73b259c9d13bbc3129ab0c1d828a/globalmount\"" pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.977898 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97769578-d73a-448d-a806-3296baae6447-scripts\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.980069 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/97769578-d73a-448d-a806-3296baae6447-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.980895 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97769578-d73a-448d-a806-3296baae6447-config-data\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:22 crc kubenswrapper[4755]: I0202 22:55:22.999705 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8h9s\" (UniqueName: \"kubernetes.io/projected/97769578-d73a-448d-a806-3296baae6447-kube-api-access-g8h9s\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:23 crc kubenswrapper[4755]: I0202 22:55:23.002846 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97769578-d73a-448d-a806-3296baae6447-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:23 crc kubenswrapper[4755]: I0202 22:55:23.047169 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39bbe900-2229-40b5-8aaf-0ed69f1ed484\") pod \"glance-default-external-api-0\" (UID: \"97769578-d73a-448d-a806-3296baae6447\") " pod="openstack/glance-default-external-api-0" Feb 02 22:55:23 crc kubenswrapper[4755]: I0202 22:55:23.085624 4755 util.go:30] "No sandbox for pod can be found. 
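The sequence above shows the fixed ordering of the volume mount pipeline: VerifyControllerAttachedVolume first, then one MountDevice per volume per node (the .../globalmount staging point in the device mount path), then one SetUp per pod. A schematic of that ordering with illustrative types, assuming nothing about kubelet's real data structures:

```go
package sketch

import "fmt"

// volumeState tracks the two node-level facts the pipeline cares about:
// whether the attach was verified and whether the device is staged.
type volumeState struct {
	attached      bool
	deviceMounted bool
}

// mountForPod mirrors the log's ordering: attach check, then a one-time
// device stage shared by all pods on the node, then the per-pod SetUp
// (the bind mount into /var/lib/kubelet/pods/<uid>/volumes/...).
func mountForPod(v *volumeState, volName, podUID string) error {
	if !v.attached {
		return fmt.Errorf("volume %s not attached yet", volName)
	}
	if !v.deviceMounted {
		v.deviceMounted = true // drivers without STAGE_UNSTAGE skip the real work
		fmt.Printf("MountVolume.MountDevice succeeded for volume %q\n", volName)
	}
	fmt.Printf("MountVolume.SetUp succeeded for volume %q pod %q\n", volName, podUID)
	return nil
}
```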
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 02 22:55:23 crc kubenswrapper[4755]: I0202 22:55:23.106125 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ee3fd38-71d5-429f-87d5-1c3556ddff55" path="/var/lib/kubelet/pods/5ee3fd38-71d5-429f-87d5-1c3556ddff55/volumes" Feb 02 22:55:23 crc kubenswrapper[4755]: I0202 22:55:23.125290 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae1efe99-ce8f-4ef7-b641-dd666e3864ea" path="/var/lib/kubelet/pods/ae1efe99-ce8f-4ef7-b641-dd666e3864ea/volumes" Feb 02 22:55:23 crc kubenswrapper[4755]: I0202 22:55:23.354088 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"742a3e8c-e69e-4886-9ffd-8129c949ce2c","Type":"ContainerStarted","Data":"51bc3033ccc20fa6ba8da8d1e4fdf0f33fd04b9a5cff08fc0ae50adac894efce"} Feb 02 22:55:23 crc kubenswrapper[4755]: I0202 22:55:23.432770 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 02 22:55:23 crc kubenswrapper[4755]: W0202 22:55:23.436540 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c76c588_a947_484d_88e4_4fe526e1ffb4.slice/crio-51cb688a5ec6a141d2717f093ec263d6abd68d5c0a6543ba1cd6896332b2c7cf WatchSource:0}: Error finding container 51cb688a5ec6a141d2717f093ec263d6abd68d5c0a6543ba1cd6896332b2c7cf: Status 404 returned error can't find the container with id 51cb688a5ec6a141d2717f093ec263d6abd68d5c0a6543ba1cd6896332b2c7cf Feb 02 22:55:23 crc kubenswrapper[4755]: I0202 22:55:23.834786 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 02 22:55:23 crc kubenswrapper[4755]: I0202 22:55:23.908781 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jgvwd"] Feb 02 22:55:23 crc kubenswrapper[4755]: I0202 22:55:23.910319 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jgvwd" Feb 02 22:55:23 crc kubenswrapper[4755]: I0202 22:55:23.912778 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Feb 02 22:55:23 crc kubenswrapper[4755]: I0202 22:55:23.914501 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-mmzks" Feb 02 22:55:23 crc kubenswrapper[4755]: I0202 22:55:23.915274 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Feb 02 22:55:23 crc kubenswrapper[4755]: I0202 22:55:23.940187 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jgvwd"] Feb 02 22:55:24 crc kubenswrapper[4755]: I0202 22:55:24.004813 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afe1508f-09cc-4874-b8b4-560d879e2e49-scripts\") pod \"nova-cell0-conductor-db-sync-jgvwd\" (UID: \"afe1508f-09cc-4874-b8b4-560d879e2e49\") " pod="openstack/nova-cell0-conductor-db-sync-jgvwd" Feb 02 22:55:24 crc kubenswrapper[4755]: I0202 22:55:24.004889 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afe1508f-09cc-4874-b8b4-560d879e2e49-config-data\") pod \"nova-cell0-conductor-db-sync-jgvwd\" (UID: \"afe1508f-09cc-4874-b8b4-560d879e2e49\") " pod="openstack/nova-cell0-conductor-db-sync-jgvwd" Feb 02 22:55:24 crc kubenswrapper[4755]: I0202 22:55:24.004977 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45jfc\" (UniqueName: \"kubernetes.io/projected/afe1508f-09cc-4874-b8b4-560d879e2e49-kube-api-access-45jfc\") pod \"nova-cell0-conductor-db-sync-jgvwd\" (UID: \"afe1508f-09cc-4874-b8b4-560d879e2e49\") " pod="openstack/nova-cell0-conductor-db-sync-jgvwd" Feb 02 22:55:24 crc kubenswrapper[4755]: I0202 22:55:24.005041 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afe1508f-09cc-4874-b8b4-560d879e2e49-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-jgvwd\" (UID: \"afe1508f-09cc-4874-b8b4-560d879e2e49\") " pod="openstack/nova-cell0-conductor-db-sync-jgvwd" Feb 02 22:55:24 crc kubenswrapper[4755]: I0202 22:55:24.106861 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afe1508f-09cc-4874-b8b4-560d879e2e49-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-jgvwd\" (UID: \"afe1508f-09cc-4874-b8b4-560d879e2e49\") " pod="openstack/nova-cell0-conductor-db-sync-jgvwd" Feb 02 22:55:24 crc kubenswrapper[4755]: I0202 22:55:24.106918 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afe1508f-09cc-4874-b8b4-560d879e2e49-scripts\") pod \"nova-cell0-conductor-db-sync-jgvwd\" (UID: \"afe1508f-09cc-4874-b8b4-560d879e2e49\") " pod="openstack/nova-cell0-conductor-db-sync-jgvwd" Feb 02 22:55:24 crc kubenswrapper[4755]: I0202 22:55:24.106972 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afe1508f-09cc-4874-b8b4-560d879e2e49-config-data\") pod \"nova-cell0-conductor-db-sync-jgvwd\" (UID: 
\"afe1508f-09cc-4874-b8b4-560d879e2e49\") " pod="openstack/nova-cell0-conductor-db-sync-jgvwd" Feb 02 22:55:24 crc kubenswrapper[4755]: I0202 22:55:24.107077 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45jfc\" (UniqueName: \"kubernetes.io/projected/afe1508f-09cc-4874-b8b4-560d879e2e49-kube-api-access-45jfc\") pod \"nova-cell0-conductor-db-sync-jgvwd\" (UID: \"afe1508f-09cc-4874-b8b4-560d879e2e49\") " pod="openstack/nova-cell0-conductor-db-sync-jgvwd" Feb 02 22:55:24 crc kubenswrapper[4755]: I0202 22:55:24.113438 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afe1508f-09cc-4874-b8b4-560d879e2e49-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-jgvwd\" (UID: \"afe1508f-09cc-4874-b8b4-560d879e2e49\") " pod="openstack/nova-cell0-conductor-db-sync-jgvwd" Feb 02 22:55:24 crc kubenswrapper[4755]: I0202 22:55:24.117308 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afe1508f-09cc-4874-b8b4-560d879e2e49-config-data\") pod \"nova-cell0-conductor-db-sync-jgvwd\" (UID: \"afe1508f-09cc-4874-b8b4-560d879e2e49\") " pod="openstack/nova-cell0-conductor-db-sync-jgvwd" Feb 02 22:55:24 crc kubenswrapper[4755]: I0202 22:55:24.122213 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afe1508f-09cc-4874-b8b4-560d879e2e49-scripts\") pod \"nova-cell0-conductor-db-sync-jgvwd\" (UID: \"afe1508f-09cc-4874-b8b4-560d879e2e49\") " pod="openstack/nova-cell0-conductor-db-sync-jgvwd" Feb 02 22:55:24 crc kubenswrapper[4755]: I0202 22:55:24.125319 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45jfc\" (UniqueName: \"kubernetes.io/projected/afe1508f-09cc-4874-b8b4-560d879e2e49-kube-api-access-45jfc\") pod \"nova-cell0-conductor-db-sync-jgvwd\" (UID: \"afe1508f-09cc-4874-b8b4-560d879e2e49\") " pod="openstack/nova-cell0-conductor-db-sync-jgvwd" Feb 02 22:55:24 crc kubenswrapper[4755]: I0202 22:55:24.274135 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jgvwd" Feb 02 22:55:24 crc kubenswrapper[4755]: I0202 22:55:24.417390 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"742a3e8c-e69e-4886-9ffd-8129c949ce2c","Type":"ContainerStarted","Data":"a040dd1413d768cb1eb54d269de8e0affffb801b1f42c8a50cb7b8ab1963e420"} Feb 02 22:55:24 crc kubenswrapper[4755]: I0202 22:55:24.418598 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"97769578-d73a-448d-a806-3296baae6447","Type":"ContainerStarted","Data":"cd7f7acc3a3fae41fc03b2e9f07bc231802c8e54ba44455a08147c498ad76d97"} Feb 02 22:55:24 crc kubenswrapper[4755]: I0202 22:55:24.430387 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5c76c588-a947-484d-88e4-4fe526e1ffb4","Type":"ContainerStarted","Data":"51cb688a5ec6a141d2717f093ec263d6abd68d5c0a6543ba1cd6896332b2c7cf"} Feb 02 22:55:24 crc kubenswrapper[4755]: I0202 22:55:24.865672 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jgvwd"] Feb 02 22:55:25 crc kubenswrapper[4755]: I0202 22:55:25.446211 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5c76c588-a947-484d-88e4-4fe526e1ffb4","Type":"ContainerStarted","Data":"a16f4b279a598aa171a26d69a389e5affb9b1361bda02bcfebb0880084b2e34e"} Feb 02 22:55:25 crc kubenswrapper[4755]: I0202 22:55:25.446536 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5c76c588-a947-484d-88e4-4fe526e1ffb4","Type":"ContainerStarted","Data":"bdbc0982acc3abe8726a37448f38992e7ad3dd16b90efaa025fff8eb5880d23b"} Feb 02 22:55:25 crc kubenswrapper[4755]: I0202 22:55:25.449193 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"97769578-d73a-448d-a806-3296baae6447","Type":"ContainerStarted","Data":"03458a783c60ed98f494c94c85a52db3b3364c474b61bd8d4b4b8a6fc765894c"} Feb 02 22:55:25 crc kubenswrapper[4755]: I0202 22:55:25.449217 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"97769578-d73a-448d-a806-3296baae6447","Type":"ContainerStarted","Data":"440116b3083473fe93168436dfaa6ec0bab0ed0dc645320dbd7c913e4b69770e"} Feb 02 22:55:25 crc kubenswrapper[4755]: I0202 22:55:25.482525 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jgvwd" event={"ID":"afe1508f-09cc-4874-b8b4-560d879e2e49","Type":"ContainerStarted","Data":"0c7cb3f49ac98c255b0125cbe97a3cd961bda14540ae65c79b50499ca396f68e"} Feb 02 22:55:25 crc kubenswrapper[4755]: I0202 22:55:25.487339 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.487321101 podStartE2EDuration="4.487321101s" podCreationTimestamp="2026-02-02 22:55:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:55:25.478227675 +0000 UTC m=+1281.169448001" watchObservedRunningTime="2026-02-02 22:55:25.487321101 +0000 UTC m=+1281.178541427" Feb 02 22:55:26 crc kubenswrapper[4755]: I0202 22:55:26.232638 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-api-0" Feb 02 22:55:26 crc kubenswrapper[4755]: I0202 
22:55:26.262466 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.262452542 podStartE2EDuration="4.262452542s" podCreationTimestamp="2026-02-02 22:55:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:55:25.524290208 +0000 UTC m=+1281.215510534" watchObservedRunningTime="2026-02-02 22:55:26.262452542 +0000 UTC m=+1281.953672868" Feb 02 22:55:27 crc kubenswrapper[4755]: I0202 22:55:27.509198 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"742a3e8c-e69e-4886-9ffd-8129c949ce2c","Type":"ContainerStarted","Data":"3dc419b8696bb76c41e44ee26348b526f0af9b6667b2a37df737c88895e046ea"} Feb 02 22:55:27 crc kubenswrapper[4755]: I0202 22:55:27.509814 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 22:55:27 crc kubenswrapper[4755]: I0202 22:55:27.509390 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="742a3e8c-e69e-4886-9ffd-8129c949ce2c" containerName="ceilometer-central-agent" containerID="cri-o://0d77e5108dd07d3fccb82dd584c95bd1487440beccaa56ba19f623638dd41fb1" gracePeriod=30 Feb 02 22:55:27 crc kubenswrapper[4755]: I0202 22:55:27.509427 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="742a3e8c-e69e-4886-9ffd-8129c949ce2c" containerName="sg-core" containerID="cri-o://a040dd1413d768cb1eb54d269de8e0affffb801b1f42c8a50cb7b8ab1963e420" gracePeriod=30 Feb 02 22:55:27 crc kubenswrapper[4755]: I0202 22:55:27.509448 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="742a3e8c-e69e-4886-9ffd-8129c949ce2c" containerName="ceilometer-notification-agent" containerID="cri-o://51bc3033ccc20fa6ba8da8d1e4fdf0f33fd04b9a5cff08fc0ae50adac894efce" gracePeriod=30 Feb 02 22:55:27 crc kubenswrapper[4755]: I0202 22:55:27.509348 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="742a3e8c-e69e-4886-9ffd-8129c949ce2c" containerName="proxy-httpd" containerID="cri-o://3dc419b8696bb76c41e44ee26348b526f0af9b6667b2a37df737c88895e046ea" gracePeriod=30 Feb 02 22:55:28 crc kubenswrapper[4755]: I0202 22:55:28.521705 4755 generic.go:334] "Generic (PLEG): container finished" podID="742a3e8c-e69e-4886-9ffd-8129c949ce2c" containerID="3dc419b8696bb76c41e44ee26348b526f0af9b6667b2a37df737c88895e046ea" exitCode=0 Feb 02 22:55:28 crc kubenswrapper[4755]: I0202 22:55:28.521985 4755 generic.go:334] "Generic (PLEG): container finished" podID="742a3e8c-e69e-4886-9ffd-8129c949ce2c" containerID="a040dd1413d768cb1eb54d269de8e0affffb801b1f42c8a50cb7b8ab1963e420" exitCode=2 Feb 02 22:55:28 crc kubenswrapper[4755]: I0202 22:55:28.521995 4755 generic.go:334] "Generic (PLEG): container finished" podID="742a3e8c-e69e-4886-9ffd-8129c949ce2c" containerID="51bc3033ccc20fa6ba8da8d1e4fdf0f33fd04b9a5cff08fc0ae50adac894efce" exitCode=0 Feb 02 22:55:28 crc kubenswrapper[4755]: I0202 22:55:28.521847 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"742a3e8c-e69e-4886-9ffd-8129c949ce2c","Type":"ContainerDied","Data":"3dc419b8696bb76c41e44ee26348b526f0af9b6667b2a37df737c88895e046ea"} Feb 02 22:55:28 crc kubenswrapper[4755]: I0202 22:55:28.522032 4755 kubelet.go:2453] "SyncLoop (PLEG): 
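The two durations in the pod_startup_latency_tracker lines are related by the image-pull window, computed on the monotonic clock (the m=+... offsets): podStartSLOduration = podStartE2EDuration - (lastFinishedPulling - firstStartedPulling). The ceilometer-0 line further below checks out exactly, and for the glance pods the pull timestamps are zero, so SLO equals E2E. A one-function sketch of the arithmetic (the function name is illustrative, not kubelet's):

```go
package sketch

// sloDuration reproduces the observed relationship. For ceilometer-0:
//
//	pull window: 1282.243024177 - 1275.890588551 = 6.352435626s
//	SLO:         12.792651845   - 6.352435626    = 6.440216219s
//
// which matches podStartSLOduration=6.440216219 in the log.
func sloDuration(e2eSeconds, firstStartedPulling, lastFinishedPulling float64) float64 {
	return e2eSeconds - (lastFinishedPulling - firstStartedPulling)
}
```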
event for pod" pod="openstack/ceilometer-0" event={"ID":"742a3e8c-e69e-4886-9ffd-8129c949ce2c","Type":"ContainerDied","Data":"a040dd1413d768cb1eb54d269de8e0affffb801b1f42c8a50cb7b8ab1963e420"} Feb 02 22:55:28 crc kubenswrapper[4755]: I0202 22:55:28.522046 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"742a3e8c-e69e-4886-9ffd-8129c949ce2c","Type":"ContainerDied","Data":"51bc3033ccc20fa6ba8da8d1e4fdf0f33fd04b9a5cff08fc0ae50adac894efce"} Feb 02 22:55:31 crc kubenswrapper[4755]: I0202 22:55:31.721338 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 02 22:55:31 crc kubenswrapper[4755]: I0202 22:55:31.721826 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 02 22:55:31 crc kubenswrapper[4755]: I0202 22:55:31.768862 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 02 22:55:31 crc kubenswrapper[4755]: I0202 22:55:31.779859 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 02 22:55:31 crc kubenswrapper[4755]: I0202 22:55:31.792670 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=6.440216219 podStartE2EDuration="12.792651845s" podCreationTimestamp="2026-02-02 22:55:19 +0000 UTC" firstStartedPulling="2026-02-02 22:55:20.199368235 +0000 UTC m=+1275.890588551" lastFinishedPulling="2026-02-02 22:55:26.551803851 +0000 UTC m=+1282.243024177" observedRunningTime="2026-02-02 22:55:27.537417498 +0000 UTC m=+1283.228637824" watchObservedRunningTime="2026-02-02 22:55:31.792651845 +0000 UTC m=+1287.483872171" Feb 02 22:55:32 crc kubenswrapper[4755]: I0202 22:55:32.570982 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 02 22:55:32 crc kubenswrapper[4755]: I0202 22:55:32.571246 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 02 22:55:33 crc kubenswrapper[4755]: I0202 22:55:33.091240 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 02 22:55:33 crc kubenswrapper[4755]: I0202 22:55:33.092846 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 02 22:55:33 crc kubenswrapper[4755]: I0202 22:55:33.157171 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 02 22:55:33 crc kubenswrapper[4755]: I0202 22:55:33.157245 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 02 22:55:33 crc kubenswrapper[4755]: I0202 22:55:33.586886 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jgvwd" event={"ID":"afe1508f-09cc-4874-b8b4-560d879e2e49","Type":"ContainerStarted","Data":"27b22a6641cce1d2838824643dbf5e80c6fad787fa7acc3bd3a4b01aecee1e92"} Feb 02 22:55:33 crc kubenswrapper[4755]: I0202 22:55:33.587383 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 02 22:55:33 crc kubenswrapper[4755]: I0202 22:55:33.587544 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack/glance-default-external-api-0" Feb 02 22:55:33 crc kubenswrapper[4755]: I0202 22:55:33.608468 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-jgvwd" podStartSLOduration=2.173834834 podStartE2EDuration="10.608452289s" podCreationTimestamp="2026-02-02 22:55:23 +0000 UTC" firstStartedPulling="2026-02-02 22:55:24.882709584 +0000 UTC m=+1280.573929910" lastFinishedPulling="2026-02-02 22:55:33.317327029 +0000 UTC m=+1289.008547365" observedRunningTime="2026-02-02 22:55:33.598760637 +0000 UTC m=+1289.289980983" watchObservedRunningTime="2026-02-02 22:55:33.608452289 +0000 UTC m=+1289.299672615" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.063270 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.126797 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/742a3e8c-e69e-4886-9ffd-8129c949ce2c-log-httpd\") pod \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.126880 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-scripts\") pod \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.126911 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-combined-ca-bundle\") pod \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.126940 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zjmj\" (UniqueName: \"kubernetes.io/projected/742a3e8c-e69e-4886-9ffd-8129c949ce2c-kube-api-access-2zjmj\") pod \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.127019 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/742a3e8c-e69e-4886-9ffd-8129c949ce2c-run-httpd\") pod \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.127041 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-config-data\") pod \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.127056 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-sg-core-conf-yaml\") pod \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\" (UID: \"742a3e8c-e69e-4886-9ffd-8129c949ce2c\") " Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.127389 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/742a3e8c-e69e-4886-9ffd-8129c949ce2c-log-httpd" (OuterVolumeSpecName: 
"log-httpd") pod "742a3e8c-e69e-4886-9ffd-8129c949ce2c" (UID: "742a3e8c-e69e-4886-9ffd-8129c949ce2c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.128145 4755 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/742a3e8c-e69e-4886-9ffd-8129c949ce2c-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.129483 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/742a3e8c-e69e-4886-9ffd-8129c949ce2c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "742a3e8c-e69e-4886-9ffd-8129c949ce2c" (UID: "742a3e8c-e69e-4886-9ffd-8129c949ce2c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.136870 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-scripts" (OuterVolumeSpecName: "scripts") pod "742a3e8c-e69e-4886-9ffd-8129c949ce2c" (UID: "742a3e8c-e69e-4886-9ffd-8129c949ce2c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.137025 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/742a3e8c-e69e-4886-9ffd-8129c949ce2c-kube-api-access-2zjmj" (OuterVolumeSpecName: "kube-api-access-2zjmj") pod "742a3e8c-e69e-4886-9ffd-8129c949ce2c" (UID: "742a3e8c-e69e-4886-9ffd-8129c949ce2c"). InnerVolumeSpecName "kube-api-access-2zjmj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.159086 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "742a3e8c-e69e-4886-9ffd-8129c949ce2c" (UID: "742a3e8c-e69e-4886-9ffd-8129c949ce2c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.214112 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "742a3e8c-e69e-4886-9ffd-8129c949ce2c" (UID: "742a3e8c-e69e-4886-9ffd-8129c949ce2c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.230480 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.230517 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zjmj\" (UniqueName: \"kubernetes.io/projected/742a3e8c-e69e-4886-9ffd-8129c949ce2c-kube-api-access-2zjmj\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.230533 4755 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/742a3e8c-e69e-4886-9ffd-8129c949ce2c-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.230547 4755 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.230557 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.242069 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-config-data" (OuterVolumeSpecName: "config-data") pod "742a3e8c-e69e-4886-9ffd-8129c949ce2c" (UID: "742a3e8c-e69e-4886-9ffd-8129c949ce2c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.332298 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/742a3e8c-e69e-4886-9ffd-8129c949ce2c-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.597140 4755 generic.go:334] "Generic (PLEG): container finished" podID="742a3e8c-e69e-4886-9ffd-8129c949ce2c" containerID="0d77e5108dd07d3fccb82dd584c95bd1487440beccaa56ba19f623638dd41fb1" exitCode=0 Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.598432 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.607862 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"742a3e8c-e69e-4886-9ffd-8129c949ce2c","Type":"ContainerDied","Data":"0d77e5108dd07d3fccb82dd584c95bd1487440beccaa56ba19f623638dd41fb1"} Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.607896 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"742a3e8c-e69e-4886-9ffd-8129c949ce2c","Type":"ContainerDied","Data":"d865aba64696c737a9925ef6afdc637d8eeb0d482ea94ab37d2d74bc2256da8d"} Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.608136 4755 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.608168 4755 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.608195 4755 scope.go:117] "RemoveContainer" containerID="3dc419b8696bb76c41e44ee26348b526f0af9b6667b2a37df737c88895e046ea" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.636206 4755 scope.go:117] "RemoveContainer" containerID="a040dd1413d768cb1eb54d269de8e0affffb801b1f42c8a50cb7b8ab1963e420" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.642520 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.667007 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.675484 4755 scope.go:117] "RemoveContainer" containerID="51bc3033ccc20fa6ba8da8d1e4fdf0f33fd04b9a5cff08fc0ae50adac894efce" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.678127 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:55:34 crc kubenswrapper[4755]: E0202 22:55:34.678542 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="742a3e8c-e69e-4886-9ffd-8129c949ce2c" containerName="ceilometer-notification-agent" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.678554 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="742a3e8c-e69e-4886-9ffd-8129c949ce2c" containerName="ceilometer-notification-agent" Feb 02 22:55:34 crc kubenswrapper[4755]: E0202 22:55:34.678571 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="742a3e8c-e69e-4886-9ffd-8129c949ce2c" containerName="ceilometer-central-agent" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.678577 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="742a3e8c-e69e-4886-9ffd-8129c949ce2c" containerName="ceilometer-central-agent" Feb 02 22:55:34 crc kubenswrapper[4755]: E0202 22:55:34.678586 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="742a3e8c-e69e-4886-9ffd-8129c949ce2c" containerName="proxy-httpd" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.678592 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="742a3e8c-e69e-4886-9ffd-8129c949ce2c" containerName="proxy-httpd" Feb 02 22:55:34 crc kubenswrapper[4755]: E0202 22:55:34.678603 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="742a3e8c-e69e-4886-9ffd-8129c949ce2c" containerName="sg-core" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.678611 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="742a3e8c-e69e-4886-9ffd-8129c949ce2c" containerName="sg-core" Feb 02 22:55:34 crc 
kubenswrapper[4755]: I0202 22:55:34.678791 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="742a3e8c-e69e-4886-9ffd-8129c949ce2c" containerName="ceilometer-notification-agent" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.678805 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="742a3e8c-e69e-4886-9ffd-8129c949ce2c" containerName="proxy-httpd" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.678826 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="742a3e8c-e69e-4886-9ffd-8129c949ce2c" containerName="ceilometer-central-agent" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.678841 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="742a3e8c-e69e-4886-9ffd-8129c949ce2c" containerName="sg-core" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.680587 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.691831 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.695520 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.695768 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.726686 4755 scope.go:117] "RemoveContainer" containerID="0d77e5108dd07d3fccb82dd584c95bd1487440beccaa56ba19f623638dd41fb1" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.740891 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.741034 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3ec8849f-772e-43ac-97de-28bc9169a1b4-run-httpd\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.741102 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-config-data\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.741121 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.741145 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3ec8849f-772e-43ac-97de-28bc9169a1b4-log-httpd\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.741246 4755 
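The RemoveStaleState pairs above are the CPU and memory managers dropping per-container resource assignments left behind by the deleted ceilometer-0 pod before the replacement is admitted, so the new containers cannot inherit stale pinning. Illustrative bookkeeping only, with assumed types rather than kubelet's:

```go
package sketch

// containerKey mirrors how the log identifies each stale entry:
// a (podUID, containerName) pair.
type containerKey struct {
	podUID        string
	containerName string
}

// cpuAssignments stands in for the manager's checkpointed state;
// the value would be a CPU set such as "2-3".
type cpuAssignments map[containerKey]string

// removeStaleState drops every entry for a pod the kubelet no longer
// tracks; each delete corresponds to one "Deleted CPUSet assignment" line.
func (a cpuAssignments) removeStaleState(podUID string, containers []string) {
	for _, name := range containers {
		delete(a, containerKey{podUID, name})
	}
}
```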
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-scripts\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.741343 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gt6qn\" (UniqueName: \"kubernetes.io/projected/3ec8849f-772e-43ac-97de-28bc9169a1b4-kube-api-access-gt6qn\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.746310 4755 scope.go:117] "RemoveContainer" containerID="3dc419b8696bb76c41e44ee26348b526f0af9b6667b2a37df737c88895e046ea" Feb 02 22:55:34 crc kubenswrapper[4755]: E0202 22:55:34.746688 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3dc419b8696bb76c41e44ee26348b526f0af9b6667b2a37df737c88895e046ea\": container with ID starting with 3dc419b8696bb76c41e44ee26348b526f0af9b6667b2a37df737c88895e046ea not found: ID does not exist" containerID="3dc419b8696bb76c41e44ee26348b526f0af9b6667b2a37df737c88895e046ea" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.746739 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3dc419b8696bb76c41e44ee26348b526f0af9b6667b2a37df737c88895e046ea"} err="failed to get container status \"3dc419b8696bb76c41e44ee26348b526f0af9b6667b2a37df737c88895e046ea\": rpc error: code = NotFound desc = could not find container \"3dc419b8696bb76c41e44ee26348b526f0af9b6667b2a37df737c88895e046ea\": container with ID starting with 3dc419b8696bb76c41e44ee26348b526f0af9b6667b2a37df737c88895e046ea not found: ID does not exist" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.746765 4755 scope.go:117] "RemoveContainer" containerID="a040dd1413d768cb1eb54d269de8e0affffb801b1f42c8a50cb7b8ab1963e420" Feb 02 22:55:34 crc kubenswrapper[4755]: E0202 22:55:34.750835 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a040dd1413d768cb1eb54d269de8e0affffb801b1f42c8a50cb7b8ab1963e420\": container with ID starting with a040dd1413d768cb1eb54d269de8e0affffb801b1f42c8a50cb7b8ab1963e420 not found: ID does not exist" containerID="a040dd1413d768cb1eb54d269de8e0affffb801b1f42c8a50cb7b8ab1963e420" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.750865 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a040dd1413d768cb1eb54d269de8e0affffb801b1f42c8a50cb7b8ab1963e420"} err="failed to get container status \"a040dd1413d768cb1eb54d269de8e0affffb801b1f42c8a50cb7b8ab1963e420\": rpc error: code = NotFound desc = could not find container \"a040dd1413d768cb1eb54d269de8e0affffb801b1f42c8a50cb7b8ab1963e420\": container with ID starting with a040dd1413d768cb1eb54d269de8e0affffb801b1f42c8a50cb7b8ab1963e420 not found: ID does not exist" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.750896 4755 scope.go:117] "RemoveContainer" containerID="51bc3033ccc20fa6ba8da8d1e4fdf0f33fd04b9a5cff08fc0ae50adac894efce" Feb 02 22:55:34 crc kubenswrapper[4755]: E0202 22:55:34.751177 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"51bc3033ccc20fa6ba8da8d1e4fdf0f33fd04b9a5cff08fc0ae50adac894efce\": container with ID starting with 51bc3033ccc20fa6ba8da8d1e4fdf0f33fd04b9a5cff08fc0ae50adac894efce not found: ID does not exist" containerID="51bc3033ccc20fa6ba8da8d1e4fdf0f33fd04b9a5cff08fc0ae50adac894efce" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.751203 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51bc3033ccc20fa6ba8da8d1e4fdf0f33fd04b9a5cff08fc0ae50adac894efce"} err="failed to get container status \"51bc3033ccc20fa6ba8da8d1e4fdf0f33fd04b9a5cff08fc0ae50adac894efce\": rpc error: code = NotFound desc = could not find container \"51bc3033ccc20fa6ba8da8d1e4fdf0f33fd04b9a5cff08fc0ae50adac894efce\": container with ID starting with 51bc3033ccc20fa6ba8da8d1e4fdf0f33fd04b9a5cff08fc0ae50adac894efce not found: ID does not exist" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.751218 4755 scope.go:117] "RemoveContainer" containerID="0d77e5108dd07d3fccb82dd584c95bd1487440beccaa56ba19f623638dd41fb1" Feb 02 22:55:34 crc kubenswrapper[4755]: E0202 22:55:34.751489 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d77e5108dd07d3fccb82dd584c95bd1487440beccaa56ba19f623638dd41fb1\": container with ID starting with 0d77e5108dd07d3fccb82dd584c95bd1487440beccaa56ba19f623638dd41fb1 not found: ID does not exist" containerID="0d77e5108dd07d3fccb82dd584c95bd1487440beccaa56ba19f623638dd41fb1" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.751508 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d77e5108dd07d3fccb82dd584c95bd1487440beccaa56ba19f623638dd41fb1"} err="failed to get container status \"0d77e5108dd07d3fccb82dd584c95bd1487440beccaa56ba19f623638dd41fb1\": rpc error: code = NotFound desc = could not find container \"0d77e5108dd07d3fccb82dd584c95bd1487440beccaa56ba19f623638dd41fb1\": container with ID starting with 0d77e5108dd07d3fccb82dd584c95bd1487440beccaa56ba19f623638dd41fb1 not found: ID does not exist" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.842848 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-config-data\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.842910 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.842935 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3ec8849f-772e-43ac-97de-28bc9169a1b4-log-httpd\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.842995 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-scripts\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0" Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 
Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.842848 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-config-data\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0"
Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.842910 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0"
Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.842935 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3ec8849f-772e-43ac-97de-28bc9169a1b4-log-httpd\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0"
Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.842995 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-scripts\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0"
Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.843092 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gt6qn\" (UniqueName: \"kubernetes.io/projected/3ec8849f-772e-43ac-97de-28bc9169a1b4-kube-api-access-gt6qn\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0"
Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.843138 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0"
Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.843224 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3ec8849f-772e-43ac-97de-28bc9169a1b4-run-httpd\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0"
Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.843673 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3ec8849f-772e-43ac-97de-28bc9169a1b4-run-httpd\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0"
Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.843979 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3ec8849f-772e-43ac-97de-28bc9169a1b4-log-httpd\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0"
Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.848381 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-config-data\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0"
Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.848881 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-scripts\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0"
Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.852381 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0"
Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.863892 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0"
Feb 02 22:55:34 crc kubenswrapper[4755]: I0202 22:55:34.875491 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gt6qn\" (UniqueName: \"kubernetes.io/projected/3ec8849f-772e-43ac-97de-28bc9169a1b4-kube-api-access-gt6qn\") pod \"ceilometer-0\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " pod="openstack/ceilometer-0"
Feb 02 22:55:35 crc kubenswrapper[4755]: I0202 22:55:35.018303 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Feb 02 22:55:35 crc kubenswrapper[4755]: I0202 22:55:35.081437 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="742a3e8c-e69e-4886-9ffd-8129c949ce2c" path="/var/lib/kubelet/pods/742a3e8c-e69e-4886-9ffd-8129c949ce2c/volumes"
Feb 02 22:55:35 crc kubenswrapper[4755]: I0202 22:55:35.360113 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Feb 02 22:55:35 crc kubenswrapper[4755]: I0202 22:55:35.405102 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Feb 02 22:55:35 crc kubenswrapper[4755]: I0202 22:55:35.536789 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Feb 02 22:55:35 crc kubenswrapper[4755]: I0202 22:55:35.604359 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Feb 02 22:55:35 crc kubenswrapper[4755]: I0202 22:55:35.610811 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Feb 02 22:55:35 crc kubenswrapper[4755]: I0202 22:55:35.611456 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3ec8849f-772e-43ac-97de-28bc9169a1b4","Type":"ContainerStarted","Data":"f5e05ed044adc5259bae7751da2a3f3e30058981022538524ba1fcc8531fcf80"}
Feb 02 22:55:36 crc kubenswrapper[4755]: I0202 22:55:36.686884 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3ec8849f-772e-43ac-97de-28bc9169a1b4","Type":"ContainerStarted","Data":"30b6a55eada77e771fb020e08e7dfbe3b5a58ff1fe0fb4806ecfe3a89c04dec1"}
Feb 02 22:55:37 crc kubenswrapper[4755]: I0202 22:55:37.698068 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3ec8849f-772e-43ac-97de-28bc9169a1b4","Type":"ContainerStarted","Data":"3ff14072feabf6d64dcbbacef56e7e4b1e81eeda549adcbde8a78cf161418ae9"}
Feb 02 22:55:37 crc kubenswrapper[4755]: I0202 22:55:37.698521 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3ec8849f-772e-43ac-97de-28bc9169a1b4","Type":"ContainerStarted","Data":"e9ec323764d38671ec6dfd13409f793ebde8cdf1845ab329bfa049b4b9bfef38"}
Feb 02 22:55:39 crc kubenswrapper[4755]: I0202 22:55:39.721107 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3ec8849f-772e-43ac-97de-28bc9169a1b4","Type":"ContainerStarted","Data":"ce1a04046ba5b98ff52ef6409f02410d58218db03cdf0393209a055714c31ee0"}
Feb 02 22:55:39 crc kubenswrapper[4755]: I0202 22:55:39.721764 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Feb 02 22:55:39 crc kubenswrapper[4755]: I0202 22:55:39.749615 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.906137333 podStartE2EDuration="5.749599295s" podCreationTimestamp="2026-02-02 22:55:34 +0000 UTC" firstStartedPulling="2026-02-02 22:55:35.517048245 +0000 UTC m=+1291.208268581" lastFinishedPulling="2026-02-02 22:55:39.360510217 +0000 UTC m=+1295.051730543" observedRunningTime="2026-02-02 22:55:39.745008606 +0000 UTC m=+1295.436228972" watchObservedRunningTime="2026-02-02 22:55:39.749599295 +0000 UTC m=+1295.440819621"
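The two durations in the startup-latency line above can be reconciled directly from its own fields: podStartSLOduration is podStartE2EDuration minus the image-pull window, where the pull window is taken from the monotonic offsets (the m=+ suffixes) rather than the wall-clock strings. This reading is inferred from the logged values, not quoted from a spec, but the arithmetic closes exactly:

// Sketch: reproduce podStartSLOduration for openstack/ceilometer-0
// from the numbers in the log line above.
package main

import (
	"fmt"
	"time"
)

func main() {
	// m=+ offsets are monotonic seconds since process start;
	// written in nanoseconds they become time.Durations.
	firstStartedPulling := 1291208268581 * time.Nanosecond // m=+1291.208268581
	lastFinishedPulling := 1295051730543 * time.Nanosecond // m=+1295.051730543
	e2e := 5749599295 * time.Nanosecond                    // podStartE2EDuration

	slo := e2e - (lastFinishedPulling - firstStartedPulling)
	fmt.Println(slo) // 1.906137333s, the logged podStartSLOduration
}

Using the wall-clock pull timestamps instead gives 1.906137323s, 10ns off, which is consistent with the durations being computed from the monotonic readings.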
Feb 02 22:55:44 crc kubenswrapper[4755]: I0202 22:55:44.786119 4755 generic.go:334] "Generic (PLEG): container finished" podID="afe1508f-09cc-4874-b8b4-560d879e2e49" containerID="27b22a6641cce1d2838824643dbf5e80c6fad787fa7acc3bd3a4b01aecee1e92" exitCode=0
Feb 02 22:55:44 crc kubenswrapper[4755]: I0202 22:55:44.786163 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jgvwd" event={"ID":"afe1508f-09cc-4874-b8b4-560d879e2e49","Type":"ContainerDied","Data":"27b22a6641cce1d2838824643dbf5e80c6fad787fa7acc3bd3a4b01aecee1e92"}
Feb 02 22:55:46 crc kubenswrapper[4755]: I0202 22:55:46.262233 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jgvwd"
Feb 02 22:55:46 crc kubenswrapper[4755]: I0202 22:55:46.424222 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afe1508f-09cc-4874-b8b4-560d879e2e49-combined-ca-bundle\") pod \"afe1508f-09cc-4874-b8b4-560d879e2e49\" (UID: \"afe1508f-09cc-4874-b8b4-560d879e2e49\") "
Feb 02 22:55:46 crc kubenswrapper[4755]: I0202 22:55:46.424305 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afe1508f-09cc-4874-b8b4-560d879e2e49-scripts\") pod \"afe1508f-09cc-4874-b8b4-560d879e2e49\" (UID: \"afe1508f-09cc-4874-b8b4-560d879e2e49\") "
Feb 02 22:55:46 crc kubenswrapper[4755]: I0202 22:55:46.424346 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45jfc\" (UniqueName: \"kubernetes.io/projected/afe1508f-09cc-4874-b8b4-560d879e2e49-kube-api-access-45jfc\") pod \"afe1508f-09cc-4874-b8b4-560d879e2e49\" (UID: \"afe1508f-09cc-4874-b8b4-560d879e2e49\") "
Feb 02 22:55:46 crc kubenswrapper[4755]: I0202 22:55:46.424465 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afe1508f-09cc-4874-b8b4-560d879e2e49-config-data\") pod \"afe1508f-09cc-4874-b8b4-560d879e2e49\" (UID: \"afe1508f-09cc-4874-b8b4-560d879e2e49\") "
Feb 02 22:55:46 crc kubenswrapper[4755]: I0202 22:55:46.430363 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afe1508f-09cc-4874-b8b4-560d879e2e49-scripts" (OuterVolumeSpecName: "scripts") pod "afe1508f-09cc-4874-b8b4-560d879e2e49" (UID: "afe1508f-09cc-4874-b8b4-560d879e2e49"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:55:46 crc kubenswrapper[4755]: I0202 22:55:46.432380 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afe1508f-09cc-4874-b8b4-560d879e2e49-kube-api-access-45jfc" (OuterVolumeSpecName: "kube-api-access-45jfc") pod "afe1508f-09cc-4874-b8b4-560d879e2e49" (UID: "afe1508f-09cc-4874-b8b4-560d879e2e49"). InnerVolumeSpecName "kube-api-access-45jfc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:55:46 crc kubenswrapper[4755]: I0202 22:55:46.462639 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afe1508f-09cc-4874-b8b4-560d879e2e49-config-data" (OuterVolumeSpecName: "config-data") pod "afe1508f-09cc-4874-b8b4-560d879e2e49" (UID: "afe1508f-09cc-4874-b8b4-560d879e2e49"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:55:46 crc kubenswrapper[4755]: I0202 22:55:46.464076 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afe1508f-09cc-4874-b8b4-560d879e2e49-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "afe1508f-09cc-4874-b8b4-560d879e2e49" (UID: "afe1508f-09cc-4874-b8b4-560d879e2e49"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:55:46 crc kubenswrapper[4755]: I0202 22:55:46.527467 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45jfc\" (UniqueName: \"kubernetes.io/projected/afe1508f-09cc-4874-b8b4-560d879e2e49-kube-api-access-45jfc\") on node \"crc\" DevicePath \"\""
Feb 02 22:55:46 crc kubenswrapper[4755]: I0202 22:55:46.527530 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afe1508f-09cc-4874-b8b4-560d879e2e49-config-data\") on node \"crc\" DevicePath \"\""
Feb 02 22:55:46 crc kubenswrapper[4755]: I0202 22:55:46.527551 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afe1508f-09cc-4874-b8b4-560d879e2e49-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 22:55:46 crc kubenswrapper[4755]: I0202 22:55:46.527568 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afe1508f-09cc-4874-b8b4-560d879e2e49-scripts\") on node \"crc\" DevicePath \"\""
Feb 02 22:55:46 crc kubenswrapper[4755]: I0202 22:55:46.813385 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jgvwd" event={"ID":"afe1508f-09cc-4874-b8b4-560d879e2e49","Type":"ContainerDied","Data":"0c7cb3f49ac98c255b0125cbe97a3cd961bda14540ae65c79b50499ca396f68e"}
Feb 02 22:55:46 crc kubenswrapper[4755]: I0202 22:55:46.813423 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0c7cb3f49ac98c255b0125cbe97a3cd961bda14540ae65c79b50499ca396f68e"
Feb 02 22:55:46 crc kubenswrapper[4755]: I0202 22:55:46.813496 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jgvwd"
Feb 02 22:55:46 crc kubenswrapper[4755]: I0202 22:55:46.945187 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"]
Feb 02 22:55:46 crc kubenswrapper[4755]: E0202 22:55:46.946059 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afe1508f-09cc-4874-b8b4-560d879e2e49" containerName="nova-cell0-conductor-db-sync"
Feb 02 22:55:46 crc kubenswrapper[4755]: I0202 22:55:46.946082 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="afe1508f-09cc-4874-b8b4-560d879e2e49" containerName="nova-cell0-conductor-db-sync"
Feb 02 22:55:46 crc kubenswrapper[4755]: I0202 22:55:46.946610 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="afe1508f-09cc-4874-b8b4-560d879e2e49" containerName="nova-cell0-conductor-db-sync"
Feb 02 22:55:46 crc kubenswrapper[4755]: I0202 22:55:46.947905 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Feb 02 22:55:46 crc kubenswrapper[4755]: I0202 22:55:46.951045 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-mmzks"
Feb 02 22:55:46 crc kubenswrapper[4755]: I0202 22:55:46.954699 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Feb 02 22:55:46 crc kubenswrapper[4755]: I0202 22:55:46.985653 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Feb 02 22:55:47 crc kubenswrapper[4755]: I0202 22:55:47.051709 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3895493-b282-4909-ad12-f4d6171695f2-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"e3895493-b282-4909-ad12-f4d6171695f2\") " pod="openstack/nova-cell0-conductor-0"
Feb 02 22:55:47 crc kubenswrapper[4755]: I0202 22:55:47.051908 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3895493-b282-4909-ad12-f4d6171695f2-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"e3895493-b282-4909-ad12-f4d6171695f2\") " pod="openstack/nova-cell0-conductor-0"
Feb 02 22:55:47 crc kubenswrapper[4755]: I0202 22:55:47.051957 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftdmf\" (UniqueName: \"kubernetes.io/projected/e3895493-b282-4909-ad12-f4d6171695f2-kube-api-access-ftdmf\") pod \"nova-cell0-conductor-0\" (UID: \"e3895493-b282-4909-ad12-f4d6171695f2\") " pod="openstack/nova-cell0-conductor-0"
Feb 02 22:55:47 crc kubenswrapper[4755]: I0202 22:55:47.153637 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3895493-b282-4909-ad12-f4d6171695f2-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"e3895493-b282-4909-ad12-f4d6171695f2\") " pod="openstack/nova-cell0-conductor-0"
Feb 02 22:55:47 crc kubenswrapper[4755]: I0202 22:55:47.153772 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftdmf\" (UniqueName: \"kubernetes.io/projected/e3895493-b282-4909-ad12-f4d6171695f2-kube-api-access-ftdmf\") pod \"nova-cell0-conductor-0\" (UID: \"e3895493-b282-4909-ad12-f4d6171695f2\") " pod="openstack/nova-cell0-conductor-0"
Feb 02 22:55:47 crc kubenswrapper[4755]: I0202 22:55:47.153983 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3895493-b282-4909-ad12-f4d6171695f2-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"e3895493-b282-4909-ad12-f4d6171695f2\") " pod="openstack/nova-cell0-conductor-0"
Feb 02 22:55:47 crc kubenswrapper[4755]: I0202 22:55:47.159253 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3895493-b282-4909-ad12-f4d6171695f2-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"e3895493-b282-4909-ad12-f4d6171695f2\") " pod="openstack/nova-cell0-conductor-0"
Feb 02 22:55:47 crc kubenswrapper[4755]: I0202 22:55:47.163046 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3895493-b282-4909-ad12-f4d6171695f2-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"e3895493-b282-4909-ad12-f4d6171695f2\") " pod="openstack/nova-cell0-conductor-0"
Feb 02 22:55:47 crc kubenswrapper[4755]: I0202 22:55:47.173386 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftdmf\" (UniqueName: \"kubernetes.io/projected/e3895493-b282-4909-ad12-f4d6171695f2-kube-api-access-ftdmf\") pod \"nova-cell0-conductor-0\" (UID: \"e3895493-b282-4909-ad12-f4d6171695f2\") " pod="openstack/nova-cell0-conductor-0"
Feb 02 22:55:47 crc kubenswrapper[4755]: I0202 22:55:47.275558 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Feb 02 22:55:47 crc kubenswrapper[4755]: I0202 22:55:47.780792 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Feb 02 22:55:47 crc kubenswrapper[4755]: I0202 22:55:47.822847 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"e3895493-b282-4909-ad12-f4d6171695f2","Type":"ContainerStarted","Data":"d5b7b30ca558653335501713c373c32984e790c79c30ee2ba7b1114673469712"}
Feb 02 22:55:48 crc kubenswrapper[4755]: I0202 22:55:48.834020 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"e3895493-b282-4909-ad12-f4d6171695f2","Type":"ContainerStarted","Data":"a9fe107553a1d164dd3cf075528caa60a97477f2cfa48b5e8227fa980632be8b"}
Feb 02 22:55:48 crc kubenswrapper[4755]: I0202 22:55:48.835524 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Feb 02 22:55:52 crc kubenswrapper[4755]: I0202 22:55:52.351424 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Feb 02 22:55:52 crc kubenswrapper[4755]: I0202 22:55:52.372306 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=6.3722880889999995 podStartE2EDuration="6.372288089s" podCreationTimestamp="2026-02-02 22:55:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:55:48.862487651 +0000 UTC m=+1304.553707977" watchObservedRunningTime="2026-02-02 22:55:52.372288089 +0000 UTC m=+1308.063508405"
Feb 02 22:55:52 crc kubenswrapper[4755]: I0202 22:55:52.809858 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-8cxf8"]
Feb 02 22:55:52 crc kubenswrapper[4755]: I0202 22:55:52.811095 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-8cxf8"
Feb 02 22:55:52 crc kubenswrapper[4755]: I0202 22:55:52.814445 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Feb 02 22:55:52 crc kubenswrapper[4755]: I0202 22:55:52.815281 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Feb 02 22:55:52 crc kubenswrapper[4755]: I0202 22:55:52.827576 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-8cxf8"]
Feb 02 22:55:52 crc kubenswrapper[4755]: I0202 22:55:52.970087 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Feb 02 22:55:52 crc kubenswrapper[4755]: I0202 22:55:52.971676 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Feb 02 22:55:52 crc kubenswrapper[4755]: I0202 22:55:52.972323 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-8cxf8\" (UID: \"98b3b96f-3477-4af7-b598-a1d45e6ffbc6\") " pod="openstack/nova-cell0-cell-mapping-8cxf8"
Feb 02 22:55:52 crc kubenswrapper[4755]: I0202 22:55:52.972413 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-config-data\") pod \"nova-cell0-cell-mapping-8cxf8\" (UID: \"98b3b96f-3477-4af7-b598-a1d45e6ffbc6\") " pod="openstack/nova-cell0-cell-mapping-8cxf8"
Feb 02 22:55:52 crc kubenswrapper[4755]: I0202 22:55:52.972443 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-scripts\") pod \"nova-cell0-cell-mapping-8cxf8\" (UID: \"98b3b96f-3477-4af7-b598-a1d45e6ffbc6\") " pod="openstack/nova-cell0-cell-mapping-8cxf8"
Feb 02 22:55:52 crc kubenswrapper[4755]: I0202 22:55:52.972612 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkpw2\" (UniqueName: \"kubernetes.io/projected/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-kube-api-access-vkpw2\") pod \"nova-cell0-cell-mapping-8cxf8\" (UID: \"98b3b96f-3477-4af7-b598-a1d45e6ffbc6\") " pod="openstack/nova-cell0-cell-mapping-8cxf8"
Feb 02 22:55:52 crc kubenswrapper[4755]: I0202 22:55:52.974385 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Feb 02 22:55:52 crc kubenswrapper[4755]: I0202 22:55:52.994114 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.074815 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/becf5f49-d63e-4f4f-ae19-22cc57440465-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"becf5f49-d63e-4f4f-ae19-22cc57440465\") " pod="openstack/nova-api-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.074893 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/becf5f49-d63e-4f4f-ae19-22cc57440465-config-data\") pod \"nova-api-0\" (UID: \"becf5f49-d63e-4f4f-ae19-22cc57440465\") " pod="openstack/nova-api-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.074930 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-config-data\") pod \"nova-cell0-cell-mapping-8cxf8\" (UID: \"98b3b96f-3477-4af7-b598-a1d45e6ffbc6\") " pod="openstack/nova-cell0-cell-mapping-8cxf8"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.074952 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/becf5f49-d63e-4f4f-ae19-22cc57440465-logs\") pod \"nova-api-0\" (UID: \"becf5f49-d63e-4f4f-ae19-22cc57440465\") " pod="openstack/nova-api-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.074995 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-scripts\") pod \"nova-cell0-cell-mapping-8cxf8\" (UID: \"98b3b96f-3477-4af7-b598-a1d45e6ffbc6\") " pod="openstack/nova-cell0-cell-mapping-8cxf8"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.075042 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkpw2\" (UniqueName: \"kubernetes.io/projected/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-kube-api-access-vkpw2\") pod \"nova-cell0-cell-mapping-8cxf8\" (UID: \"98b3b96f-3477-4af7-b598-a1d45e6ffbc6\") " pod="openstack/nova-cell0-cell-mapping-8cxf8"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.075103 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bklwn\" (UniqueName: \"kubernetes.io/projected/becf5f49-d63e-4f4f-ae19-22cc57440465-kube-api-access-bklwn\") pod \"nova-api-0\" (UID: \"becf5f49-d63e-4f4f-ae19-22cc57440465\") " pod="openstack/nova-api-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.075146 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-8cxf8\" (UID: \"98b3b96f-3477-4af7-b598-a1d45e6ffbc6\") " pod="openstack/nova-cell0-cell-mapping-8cxf8"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.111481 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.112126 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-scripts\") pod \"nova-cell0-cell-mapping-8cxf8\" (UID: \"98b3b96f-3477-4af7-b598-a1d45e6ffbc6\") " pod="openstack/nova-cell0-cell-mapping-8cxf8"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.113032 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.113302 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-8cxf8\" (UID: \"98b3b96f-3477-4af7-b598-a1d45e6ffbc6\") " pod="openstack/nova-cell0-cell-mapping-8cxf8"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.113682 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-config-data\") pod \"nova-cell0-cell-mapping-8cxf8\" (UID: \"98b3b96f-3477-4af7-b598-a1d45e6ffbc6\") " pod="openstack/nova-cell0-cell-mapping-8cxf8"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.116356 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkpw2\" (UniqueName: \"kubernetes.io/projected/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-kube-api-access-vkpw2\") pod \"nova-cell0-cell-mapping-8cxf8\" (UID: \"98b3b96f-3477-4af7-b598-a1d45e6ffbc6\") " pod="openstack/nova-cell0-cell-mapping-8cxf8"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.118117 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.142393 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.172154 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-8cxf8"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.177089 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bklwn\" (UniqueName: \"kubernetes.io/projected/becf5f49-d63e-4f4f-ae19-22cc57440465-kube-api-access-bklwn\") pod \"nova-api-0\" (UID: \"becf5f49-d63e-4f4f-ae19-22cc57440465\") " pod="openstack/nova-api-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.177149 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/becf5f49-d63e-4f4f-ae19-22cc57440465-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"becf5f49-d63e-4f4f-ae19-22cc57440465\") " pod="openstack/nova-api-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.177201 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/becf5f49-d63e-4f4f-ae19-22cc57440465-config-data\") pod \"nova-api-0\" (UID: \"becf5f49-d63e-4f4f-ae19-22cc57440465\") " pod="openstack/nova-api-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.177255 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/becf5f49-d63e-4f4f-ae19-22cc57440465-logs\") pod \"nova-api-0\" (UID: \"becf5f49-d63e-4f4f-ae19-22cc57440465\") " pod="openstack/nova-api-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.183080 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/becf5f49-d63e-4f4f-ae19-22cc57440465-logs\") pod \"nova-api-0\" (UID: \"becf5f49-d63e-4f4f-ae19-22cc57440465\") " pod="openstack/nova-api-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.185058 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/becf5f49-d63e-4f4f-ae19-22cc57440465-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"becf5f49-d63e-4f4f-ae19-22cc57440465\") " pod="openstack/nova-api-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.188200 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/becf5f49-d63e-4f4f-ae19-22cc57440465-config-data\") pod \"nova-api-0\" (UID: \"becf5f49-d63e-4f4f-ae19-22cc57440465\") " pod="openstack/nova-api-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.197390 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.198984 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.207465 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.253202 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bklwn\" (UniqueName: \"kubernetes.io/projected/becf5f49-d63e-4f4f-ae19-22cc57440465-kube-api-access-bklwn\") pod \"nova-api-0\" (UID: \"becf5f49-d63e-4f4f-ae19-22cc57440465\") " pod="openstack/nova-api-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.278605 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bf32346-b983-4ad6-8e31-d4845e843f41-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"3bf32346-b983-4ad6-8e31-d4845e843f41\") " pod="openstack/nova-cell1-novncproxy-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.278684 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlxm6\" (UniqueName: \"kubernetes.io/projected/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-kube-api-access-dlxm6\") pod \"nova-metadata-0\" (UID: \"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb\") " pod="openstack/nova-metadata-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.278766 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-logs\") pod \"nova-metadata-0\" (UID: \"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb\") " pod="openstack/nova-metadata-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.278865 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77cr8\" (UniqueName: \"kubernetes.io/projected/3bf32346-b983-4ad6-8e31-d4845e843f41-kube-api-access-77cr8\") pod \"nova-cell1-novncproxy-0\" (UID: \"3bf32346-b983-4ad6-8e31-d4845e843f41\") " pod="openstack/nova-cell1-novncproxy-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.278884 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3bf32346-b983-4ad6-8e31-d4845e843f41-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"3bf32346-b983-4ad6-8e31-d4845e843f41\") " pod="openstack/nova-cell1-novncproxy-0"
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-config-data\") pod \"nova-metadata-0\" (UID: \"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb\") " pod="openstack/nova-metadata-0" Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.278972 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb\") " pod="openstack/nova-metadata-0" Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.286020 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.287494 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.288625 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.290711 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.328223 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.357088 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.372350 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-884c8b8f5-sqb6n"] Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.374445 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n" Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.381482 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bvl4\" (UniqueName: \"kubernetes.io/projected/7634eaa3-85d9-49ba-b5c0-9190ef5d3f94-kube-api-access-8bvl4\") pod \"nova-scheduler-0\" (UID: \"7634eaa3-85d9-49ba-b5c0-9190ef5d3f94\") " pod="openstack/nova-scheduler-0" Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.381545 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-config-data\") pod \"nova-metadata-0\" (UID: \"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb\") " pod="openstack/nova-metadata-0" Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.381569 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb\") " pod="openstack/nova-metadata-0" Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.381616 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bf32346-b983-4ad6-8e31-d4845e843f41-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"3bf32346-b983-4ad6-8e31-d4845e843f41\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.381667 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlxm6\" (UniqueName: \"kubernetes.io/projected/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-kube-api-access-dlxm6\") pod \"nova-metadata-0\" (UID: \"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb\") " pod="openstack/nova-metadata-0" Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.381790 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-logs\") pod \"nova-metadata-0\" (UID: \"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb\") " pod="openstack/nova-metadata-0" Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.381849 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7634eaa3-85d9-49ba-b5c0-9190ef5d3f94-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7634eaa3-85d9-49ba-b5c0-9190ef5d3f94\") " pod="openstack/nova-scheduler-0" Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.381938 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7634eaa3-85d9-49ba-b5c0-9190ef5d3f94-config-data\") pod \"nova-scheduler-0\" (UID: \"7634eaa3-85d9-49ba-b5c0-9190ef5d3f94\") " pod="openstack/nova-scheduler-0" Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.381969 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77cr8\" (UniqueName: \"kubernetes.io/projected/3bf32346-b983-4ad6-8e31-d4845e843f41-kube-api-access-77cr8\") pod \"nova-cell1-novncproxy-0\" (UID: \"3bf32346-b983-4ad6-8e31-d4845e843f41\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.381996 4755 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3bf32346-b983-4ad6-8e31-d4845e843f41-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"3bf32346-b983-4ad6-8e31-d4845e843f41\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.385900 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bf32346-b983-4ad6-8e31-d4845e843f41-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"3bf32346-b983-4ad6-8e31-d4845e843f41\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.407034 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-logs\") pod \"nova-metadata-0\" (UID: \"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb\") " pod="openstack/nova-metadata-0" Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.424979 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlxm6\" (UniqueName: \"kubernetes.io/projected/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-kube-api-access-dlxm6\") pod \"nova-metadata-0\" (UID: \"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb\") " pod="openstack/nova-metadata-0" Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.438281 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3bf32346-b983-4ad6-8e31-d4845e843f41-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"3bf32346-b983-4ad6-8e31-d4845e843f41\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.443239 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-config-data\") pod \"nova-metadata-0\" (UID: \"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb\") " pod="openstack/nova-metadata-0" Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.467907 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-884c8b8f5-sqb6n"] Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.468369 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb\") " pod="openstack/nova-metadata-0" Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.488107 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77cr8\" (UniqueName: \"kubernetes.io/projected/3bf32346-b983-4ad6-8e31-d4845e843f41-kube-api-access-77cr8\") pod \"nova-cell1-novncproxy-0\" (UID: \"3bf32346-b983-4ad6-8e31-d4845e843f41\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.489327 4755 util.go:30] "No sandbox for pod can be found. 
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.489327 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.527541 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7634eaa3-85d9-49ba-b5c0-9190ef5d3f94-config-data\") pod \"nova-scheduler-0\" (UID: \"7634eaa3-85d9-49ba-b5c0-9190ef5d3f94\") " pod="openstack/nova-scheduler-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.527643 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-config\") pod \"dnsmasq-dns-884c8b8f5-sqb6n\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.527666 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bvl4\" (UniqueName: \"kubernetes.io/projected/7634eaa3-85d9-49ba-b5c0-9190ef5d3f94-kube-api-access-8bvl4\") pod \"nova-scheduler-0\" (UID: \"7634eaa3-85d9-49ba-b5c0-9190ef5d3f94\") " pod="openstack/nova-scheduler-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.527753 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-dns-svc\") pod \"dnsmasq-dns-884c8b8f5-sqb6n\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.527770 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-dns-swift-storage-0\") pod \"dnsmasq-dns-884c8b8f5-sqb6n\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.527796 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-ovsdbserver-sb\") pod \"dnsmasq-dns-884c8b8f5-sqb6n\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.527835 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7634eaa3-85d9-49ba-b5c0-9190ef5d3f94-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7634eaa3-85d9-49ba-b5c0-9190ef5d3f94\") " pod="openstack/nova-scheduler-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.527850 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-ovsdbserver-nb\") pod \"dnsmasq-dns-884c8b8f5-sqb6n\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.527907 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqwlw\" (UniqueName: \"kubernetes.io/projected/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-kube-api-access-qqwlw\") pod \"dnsmasq-dns-884c8b8f5-sqb6n\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.532393 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7634eaa3-85d9-49ba-b5c0-9190ef5d3f94-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7634eaa3-85d9-49ba-b5c0-9190ef5d3f94\") " pod="openstack/nova-scheduler-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.538378 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7634eaa3-85d9-49ba-b5c0-9190ef5d3f94-config-data\") pod \"nova-scheduler-0\" (UID: \"7634eaa3-85d9-49ba-b5c0-9190ef5d3f94\") " pod="openstack/nova-scheduler-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.545571 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bvl4\" (UniqueName: \"kubernetes.io/projected/7634eaa3-85d9-49ba-b5c0-9190ef5d3f94-kube-api-access-8bvl4\") pod \"nova-scheduler-0\" (UID: \"7634eaa3-85d9-49ba-b5c0-9190ef5d3f94\") " pod="openstack/nova-scheduler-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.568246 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.635476 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-ovsdbserver-nb\") pod \"dnsmasq-dns-884c8b8f5-sqb6n\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.635693 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqwlw\" (UniqueName: \"kubernetes.io/projected/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-kube-api-access-qqwlw\") pod \"dnsmasq-dns-884c8b8f5-sqb6n\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.635896 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-config\") pod \"dnsmasq-dns-884c8b8f5-sqb6n\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.636043 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-dns-svc\") pod \"dnsmasq-dns-884c8b8f5-sqb6n\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.636138 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-dns-swift-storage-0\") pod \"dnsmasq-dns-884c8b8f5-sqb6n\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.636173 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-ovsdbserver-sb\") pod \"dnsmasq-dns-884c8b8f5-sqb6n\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.658650 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-dns-svc\") pod \"dnsmasq-dns-884c8b8f5-sqb6n\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.668525 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-ovsdbserver-sb\") pod \"dnsmasq-dns-884c8b8f5-sqb6n\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.674150 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-ovsdbserver-nb\") pod \"dnsmasq-dns-884c8b8f5-sqb6n\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.675762 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-dns-swift-storage-0\") pod \"dnsmasq-dns-884c8b8f5-sqb6n\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.675905 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-config\") pod \"dnsmasq-dns-884c8b8f5-sqb6n\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.676790 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqwlw\" (UniqueName: \"kubernetes.io/projected/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-kube-api-access-qqwlw\") pod \"dnsmasq-dns-884c8b8f5-sqb6n\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.708035 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.899262 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n"
Feb 02 22:55:53 crc kubenswrapper[4755]: I0202 22:55:53.946598 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-8cxf8"]
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.227187 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Feb 02 22:55:54 crc kubenswrapper[4755]: W0202 22:55:54.235859 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbecf5f49_d63e_4f4f_ae19_22cc57440465.slice/crio-e5bc3d63229d2afacd6d0c9d63acc1c125984e661a83a3c2b3830689622e371b WatchSource:0}: Error finding container e5bc3d63229d2afacd6d0c9d63acc1c125984e661a83a3c2b3830689622e371b: Status 404 returned error can't find the container with id e5bc3d63229d2afacd6d0c9d63acc1c125984e661a83a3c2b3830689622e371b
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.399390 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.418777 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.575781 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.606412 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-884c8b8f5-sqb6n"]
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.768497 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-rnblj"]
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.769859 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-rnblj"
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.773299 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts"
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.773782 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.783236 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-rnblj"]
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.876958 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2940cf3-bd51-4393-8e05-6af05f9131ad-scripts\") pod \"nova-cell1-conductor-db-sync-rnblj\" (UID: \"a2940cf3-bd51-4393-8e05-6af05f9131ad\") " pod="openstack/nova-cell1-conductor-db-sync-rnblj"
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.877032 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8wgr\" (UniqueName: \"kubernetes.io/projected/a2940cf3-bd51-4393-8e05-6af05f9131ad-kube-api-access-x8wgr\") pod \"nova-cell1-conductor-db-sync-rnblj\" (UID: \"a2940cf3-bd51-4393-8e05-6af05f9131ad\") " pod="openstack/nova-cell1-conductor-db-sync-rnblj"
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.877110 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2940cf3-bd51-4393-8e05-6af05f9131ad-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-rnblj\" (UID: \"a2940cf3-bd51-4393-8e05-6af05f9131ad\") " pod="openstack/nova-cell1-conductor-db-sync-rnblj"
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.877135 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2940cf3-bd51-4393-8e05-6af05f9131ad-config-data\") pod \"nova-cell1-conductor-db-sync-rnblj\" (UID: \"a2940cf3-bd51-4393-8e05-6af05f9131ad\") " pod="openstack/nova-cell1-conductor-db-sync-rnblj"
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.950610 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"becf5f49-d63e-4f4f-ae19-22cc57440465","Type":"ContainerStarted","Data":"e5bc3d63229d2afacd6d0c9d63acc1c125984e661a83a3c2b3830689622e371b"}
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.952488 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7634eaa3-85d9-49ba-b5c0-9190ef5d3f94","Type":"ContainerStarted","Data":"f0741912bdd0d169617238d7e50e7bbb27246829088774ec1078e4d50f1f8cd4"}
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.955120 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n" event={"ID":"e9d11d8c-3a46-46bd-b2cc-01f736ccb878","Type":"ContainerStarted","Data":"b6aa22fbbf97efc511476b23f08b6ccde4658ce867827e0627a0f615eaa7452e"}
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.955163 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n" event={"ID":"e9d11d8c-3a46-46bd-b2cc-01f736ccb878","Type":"ContainerStarted","Data":"14080d75936791b8b492fac4170fbd35cbbfa86688bb9ddd2e3e30a62419ceba"}
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.957308 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb","Type":"ContainerStarted","Data":"bef337983636e7bc780079603a85892fd098732fe33c5cc3eb7b6f7bdb101119"}
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.960216 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-8cxf8" event={"ID":"98b3b96f-3477-4af7-b598-a1d45e6ffbc6","Type":"ContainerStarted","Data":"779ec7c3d27a9ddd6a055ed372ce6dcca6fe056357b54ffcb54ffb237d193cea"}
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.960247 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-8cxf8" event={"ID":"98b3b96f-3477-4af7-b598-a1d45e6ffbc6","Type":"ContainerStarted","Data":"d95ab7dc06c953ee3ce716eb32c8676789820c28d325786cfbdb76e6db24fe4a"}
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.967369 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3bf32346-b983-4ad6-8e31-d4845e843f41","Type":"ContainerStarted","Data":"843a5ddc6198c6225f7173e7b1d424579ae6d4498956ce46c51834b4027c1fc0"}
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.979496 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2940cf3-bd51-4393-8e05-6af05f9131ad-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-rnblj\" (UID: \"a2940cf3-bd51-4393-8e05-6af05f9131ad\") " pod="openstack/nova-cell1-conductor-db-sync-rnblj"
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.979576 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2940cf3-bd51-4393-8e05-6af05f9131ad-config-data\") pod \"nova-cell1-conductor-db-sync-rnblj\" (UID: \"a2940cf3-bd51-4393-8e05-6af05f9131ad\") " pod="openstack/nova-cell1-conductor-db-sync-rnblj"
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.979762 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2940cf3-bd51-4393-8e05-6af05f9131ad-scripts\") pod \"nova-cell1-conductor-db-sync-rnblj\" (UID: \"a2940cf3-bd51-4393-8e05-6af05f9131ad\") " pod="openstack/nova-cell1-conductor-db-sync-rnblj"
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.980079 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8wgr\" (UniqueName: \"kubernetes.io/projected/a2940cf3-bd51-4393-8e05-6af05f9131ad-kube-api-access-x8wgr\") pod \"nova-cell1-conductor-db-sync-rnblj\" (UID: \"a2940cf3-bd51-4393-8e05-6af05f9131ad\") " pod="openstack/nova-cell1-conductor-db-sync-rnblj"
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.985983 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2940cf3-bd51-4393-8e05-6af05f9131ad-config-data\") pod \"nova-cell1-conductor-db-sync-rnblj\" (UID: \"a2940cf3-bd51-4393-8e05-6af05f9131ad\") " pod="openstack/nova-cell1-conductor-db-sync-rnblj"
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.986399 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2940cf3-bd51-4393-8e05-6af05f9131ad-scripts\") pod \"nova-cell1-conductor-db-sync-rnblj\" (UID: \"a2940cf3-bd51-4393-8e05-6af05f9131ad\") " pod="openstack/nova-cell1-conductor-db-sync-rnblj"
Feb 02 22:55:54 crc kubenswrapper[4755]: I0202 22:55:54.989462 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2940cf3-bd51-4393-8e05-6af05f9131ad-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-rnblj\" (UID: \"a2940cf3-bd51-4393-8e05-6af05f9131ad\") " pod="openstack/nova-cell1-conductor-db-sync-rnblj" Feb 02 22:55:55 crc kubenswrapper[4755]: I0202 22:55:55.004515 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-8cxf8" podStartSLOduration=3.004496813 podStartE2EDuration="3.004496813s" podCreationTimestamp="2026-02-02 22:55:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:55:55.001249731 +0000 UTC m=+1310.692470067" watchObservedRunningTime="2026-02-02 22:55:55.004496813 +0000 UTC m=+1310.695717129" Feb 02 22:55:55 crc kubenswrapper[4755]: I0202 22:55:55.006888 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8wgr\" (UniqueName: \"kubernetes.io/projected/a2940cf3-bd51-4393-8e05-6af05f9131ad-kube-api-access-x8wgr\") pod \"nova-cell1-conductor-db-sync-rnblj\" (UID: \"a2940cf3-bd51-4393-8e05-6af05f9131ad\") " pod="openstack/nova-cell1-conductor-db-sync-rnblj" Feb 02 22:55:55 crc kubenswrapper[4755]: I0202 22:55:55.086793 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-rnblj" Feb 02 22:55:55 crc kubenswrapper[4755]: I0202 22:55:55.699999 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-rnblj"] Feb 02 22:55:55 crc kubenswrapper[4755]: I0202 22:55:55.980048 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-rnblj" event={"ID":"a2940cf3-bd51-4393-8e05-6af05f9131ad","Type":"ContainerStarted","Data":"b4cb424a0e25320190c7e97f0052b160b6836e6becb29d14988b9e4957c707f6"} Feb 02 22:55:55 crc kubenswrapper[4755]: I0202 22:55:55.984741 4755 generic.go:334] "Generic (PLEG): container finished" podID="e9d11d8c-3a46-46bd-b2cc-01f736ccb878" containerID="b6aa22fbbf97efc511476b23f08b6ccde4658ce867827e0627a0f615eaa7452e" exitCode=0 Feb 02 22:55:55 crc kubenswrapper[4755]: I0202 22:55:55.986065 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n" event={"ID":"e9d11d8c-3a46-46bd-b2cc-01f736ccb878","Type":"ContainerDied","Data":"b6aa22fbbf97efc511476b23f08b6ccde4658ce867827e0627a0f615eaa7452e"} Feb 02 22:55:56 crc kubenswrapper[4755]: I0202 22:55:56.874789 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 22:55:56 crc kubenswrapper[4755]: I0202 22:55:56.886239 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 22:55:57 crc kubenswrapper[4755]: I0202 22:55:57.006572 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-rnblj" event={"ID":"a2940cf3-bd51-4393-8e05-6af05f9131ad","Type":"ContainerStarted","Data":"2360ddbfc06549d7f0a9b865f800a78fc532f116e0c42aa4dad699141a4387ec"} Feb 02 22:55:57 crc kubenswrapper[4755]: I0202 22:55:57.037081 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-rnblj" podStartSLOduration=3.037063438 podStartE2EDuration="3.037063438s" podCreationTimestamp="2026-02-02 22:55:54 
+0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:55:57.029137166 +0000 UTC m=+1312.720357492" watchObservedRunningTime="2026-02-02 22:55:57.037063438 +0000 UTC m=+1312.728283764" Feb 02 22:55:58 crc kubenswrapper[4755]: I0202 22:55:58.021507 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n" event={"ID":"e9d11d8c-3a46-46bd-b2cc-01f736ccb878","Type":"ContainerStarted","Data":"0b869454a89ba404d700b4c05c55df7d9a04fc67a039c42968a379ddf6a42452"} Feb 02 22:55:58 crc kubenswrapper[4755]: I0202 22:55:58.053268 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n" podStartSLOduration=5.053243013 podStartE2EDuration="5.053243013s" podCreationTimestamp="2026-02-02 22:55:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:55:58.046325559 +0000 UTC m=+1313.737545905" watchObservedRunningTime="2026-02-02 22:55:58.053243013 +0000 UTC m=+1313.744463349" Feb 02 22:55:58 crc kubenswrapper[4755]: I0202 22:55:58.900127 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n" Feb 02 22:55:59 crc kubenswrapper[4755]: I0202 22:55:59.032650 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3bf32346-b983-4ad6-8e31-d4845e843f41","Type":"ContainerStarted","Data":"4debd88f138e5c3f5235e63e6198f01d3d08ecbd485cd3886b7ef5bd5e62a3c4"} Feb 02 22:55:59 crc kubenswrapper[4755]: I0202 22:55:59.032888 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="3bf32346-b983-4ad6-8e31-d4845e843f41" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://4debd88f138e5c3f5235e63e6198f01d3d08ecbd485cd3886b7ef5bd5e62a3c4" gracePeriod=30 Feb 02 22:55:59 crc kubenswrapper[4755]: I0202 22:55:59.035634 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"becf5f49-d63e-4f4f-ae19-22cc57440465","Type":"ContainerStarted","Data":"914e88491147ab79d5218f29c161a35d3c453cdef3bf2a35ac29d1a3c2e4012d"} Feb 02 22:55:59 crc kubenswrapper[4755]: I0202 22:55:59.036916 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7634eaa3-85d9-49ba-b5c0-9190ef5d3f94","Type":"ContainerStarted","Data":"84c8d1de0cb2015043ea30ad860a76546f117c5265032b7f632d94368b5b06a9"} Feb 02 22:55:59 crc kubenswrapper[4755]: I0202 22:55:59.040660 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb","Type":"ContainerStarted","Data":"f58e7c14089278c7505dfd86b431377b8cc004b8a482e5fdb5181df80828dbfc"} Feb 02 22:55:59 crc kubenswrapper[4755]: I0202 22:55:59.062481 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.087198743 podStartE2EDuration="6.062464363s" podCreationTimestamp="2026-02-02 22:55:53 +0000 UTC" firstStartedPulling="2026-02-02 22:55:54.427166162 +0000 UTC m=+1310.118386488" lastFinishedPulling="2026-02-02 22:55:58.402431782 +0000 UTC m=+1314.093652108" observedRunningTime="2026-02-02 22:55:59.056358742 +0000 UTC m=+1314.747579068" watchObservedRunningTime="2026-02-02 22:55:59.062464363 +0000 UTC m=+1314.753684689" Feb 02 
22:55:59 crc kubenswrapper[4755]: I0202 22:55:59.099674 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.102123222 podStartE2EDuration="6.099658437s" podCreationTimestamp="2026-02-02 22:55:53 +0000 UTC" firstStartedPulling="2026-02-02 22:55:54.40285656 +0000 UTC m=+1310.094076886" lastFinishedPulling="2026-02-02 22:55:58.400391775 +0000 UTC m=+1314.091612101" observedRunningTime="2026-02-02 22:55:59.090092279 +0000 UTC m=+1314.781312605" watchObservedRunningTime="2026-02-02 22:55:59.099658437 +0000 UTC m=+1314.790878763" Feb 02 22:56:00 crc kubenswrapper[4755]: I0202 22:56:00.052482 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb","Type":"ContainerStarted","Data":"e0dd321737e48f16ed575f80ed79bf1c097aada3f305a97d87e560ba2625b630"} Feb 02 22:56:00 crc kubenswrapper[4755]: I0202 22:56:00.052619 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="21a6a4af-4126-48d2-b839-b0d9e5ec3fbb" containerName="nova-metadata-log" containerID="cri-o://f58e7c14089278c7505dfd86b431377b8cc004b8a482e5fdb5181df80828dbfc" gracePeriod=30 Feb 02 22:56:00 crc kubenswrapper[4755]: I0202 22:56:00.053287 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="21a6a4af-4126-48d2-b839-b0d9e5ec3fbb" containerName="nova-metadata-metadata" containerID="cri-o://e0dd321737e48f16ed575f80ed79bf1c097aada3f305a97d87e560ba2625b630" gracePeriod=30 Feb 02 22:56:00 crc kubenswrapper[4755]: I0202 22:56:00.061538 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"becf5f49-d63e-4f4f-ae19-22cc57440465","Type":"ContainerStarted","Data":"dd199947ff94e8ac15285616d586e78e5d01bb2083b172aa07d290d095753058"} Feb 02 22:56:00 crc kubenswrapper[4755]: I0202 22:56:00.083845 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.2706213809999998 podStartE2EDuration="7.083821613s" podCreationTimestamp="2026-02-02 22:55:53 +0000 UTC" firstStartedPulling="2026-02-02 22:55:54.589875158 +0000 UTC m=+1310.281095474" lastFinishedPulling="2026-02-02 22:55:58.40307538 +0000 UTC m=+1314.094295706" observedRunningTime="2026-02-02 22:56:00.071935619 +0000 UTC m=+1315.763155945" watchObservedRunningTime="2026-02-02 22:56:00.083821613 +0000 UTC m=+1315.775041939" Feb 02 22:56:00 crc kubenswrapper[4755]: I0202 22:56:00.104710 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.941774023 podStartE2EDuration="8.104691589s" podCreationTimestamp="2026-02-02 22:55:52 +0000 UTC" firstStartedPulling="2026-02-02 22:55:54.242672005 +0000 UTC m=+1309.933892341" lastFinishedPulling="2026-02-02 22:55:58.405589581 +0000 UTC m=+1314.096809907" observedRunningTime="2026-02-02 22:56:00.09582961 +0000 UTC m=+1315.787049946" watchObservedRunningTime="2026-02-02 22:56:00.104691589 +0000 UTC m=+1315.795911915" Feb 02 22:56:00 crc kubenswrapper[4755]: I0202 22:56:00.696174 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 22:56:00 crc kubenswrapper[4755]: I0202 22:56:00.864568 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-logs\") pod \"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb\" (UID: \"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb\") " Feb 02 22:56:00 crc kubenswrapper[4755]: I0202 22:56:00.864964 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-config-data\") pod \"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb\" (UID: \"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb\") " Feb 02 22:56:00 crc kubenswrapper[4755]: I0202 22:56:00.865030 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-logs" (OuterVolumeSpecName: "logs") pod "21a6a4af-4126-48d2-b839-b0d9e5ec3fbb" (UID: "21a6a4af-4126-48d2-b839-b0d9e5ec3fbb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:56:00 crc kubenswrapper[4755]: I0202 22:56:00.865131 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-combined-ca-bundle\") pod \"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb\" (UID: \"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb\") " Feb 02 22:56:00 crc kubenswrapper[4755]: I0202 22:56:00.865216 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlxm6\" (UniqueName: \"kubernetes.io/projected/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-kube-api-access-dlxm6\") pod \"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb\" (UID: \"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb\") " Feb 02 22:56:00 crc kubenswrapper[4755]: I0202 22:56:00.865933 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-logs\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:00 crc kubenswrapper[4755]: I0202 22:56:00.881995 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-kube-api-access-dlxm6" (OuterVolumeSpecName: "kube-api-access-dlxm6") pod "21a6a4af-4126-48d2-b839-b0d9e5ec3fbb" (UID: "21a6a4af-4126-48d2-b839-b0d9e5ec3fbb"). InnerVolumeSpecName "kube-api-access-dlxm6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:56:00 crc kubenswrapper[4755]: I0202 22:56:00.906869 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-config-data" (OuterVolumeSpecName: "config-data") pod "21a6a4af-4126-48d2-b839-b0d9e5ec3fbb" (UID: "21a6a4af-4126-48d2-b839-b0d9e5ec3fbb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:00 crc kubenswrapper[4755]: I0202 22:56:00.926631 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "21a6a4af-4126-48d2-b839-b0d9e5ec3fbb" (UID: "21a6a4af-4126-48d2-b839-b0d9e5ec3fbb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:00 crc kubenswrapper[4755]: I0202 22:56:00.967869 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:00 crc kubenswrapper[4755]: I0202 22:56:00.967910 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:00 crc kubenswrapper[4755]: I0202 22:56:00.967921 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlxm6\" (UniqueName: \"kubernetes.io/projected/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb-kube-api-access-dlxm6\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.070510 4755 generic.go:334] "Generic (PLEG): container finished" podID="21a6a4af-4126-48d2-b839-b0d9e5ec3fbb" containerID="e0dd321737e48f16ed575f80ed79bf1c097aada3f305a97d87e560ba2625b630" exitCode=0 Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.070551 4755 generic.go:334] "Generic (PLEG): container finished" podID="21a6a4af-4126-48d2-b839-b0d9e5ec3fbb" containerID="f58e7c14089278c7505dfd86b431377b8cc004b8a482e5fdb5181df80828dbfc" exitCode=143 Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.070611 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.085986 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb","Type":"ContainerDied","Data":"e0dd321737e48f16ed575f80ed79bf1c097aada3f305a97d87e560ba2625b630"} Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.086044 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb","Type":"ContainerDied","Data":"f58e7c14089278c7505dfd86b431377b8cc004b8a482e5fdb5181df80828dbfc"} Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.086067 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"21a6a4af-4126-48d2-b839-b0d9e5ec3fbb","Type":"ContainerDied","Data":"bef337983636e7bc780079603a85892fd098732fe33c5cc3eb7b6f7bdb101119"} Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.086099 4755 scope.go:117] "RemoveContainer" containerID="e0dd321737e48f16ed575f80ed79bf1c097aada3f305a97d87e560ba2625b630" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.109422 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.117442 4755 scope.go:117] "RemoveContainer" containerID="f58e7c14089278c7505dfd86b431377b8cc004b8a482e5fdb5181df80828dbfc" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.119941 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.137455 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 02 22:56:01 crc kubenswrapper[4755]: E0202 22:56:01.138225 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21a6a4af-4126-48d2-b839-b0d9e5ec3fbb" containerName="nova-metadata-metadata" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.138344 4755 
state_mem.go:107] "Deleted CPUSet assignment" podUID="21a6a4af-4126-48d2-b839-b0d9e5ec3fbb" containerName="nova-metadata-metadata" Feb 02 22:56:01 crc kubenswrapper[4755]: E0202 22:56:01.138574 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21a6a4af-4126-48d2-b839-b0d9e5ec3fbb" containerName="nova-metadata-log" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.138654 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="21a6a4af-4126-48d2-b839-b0d9e5ec3fbb" containerName="nova-metadata-log" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.138995 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="21a6a4af-4126-48d2-b839-b0d9e5ec3fbb" containerName="nova-metadata-log" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.139092 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="21a6a4af-4126-48d2-b839-b0d9e5ec3fbb" containerName="nova-metadata-metadata" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.140712 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.150426 4755 scope.go:117] "RemoveContainer" containerID="e0dd321737e48f16ed575f80ed79bf1c097aada3f305a97d87e560ba2625b630" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.151157 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.151494 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 02 22:56:01 crc kubenswrapper[4755]: E0202 22:56:01.158353 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0dd321737e48f16ed575f80ed79bf1c097aada3f305a97d87e560ba2625b630\": container with ID starting with e0dd321737e48f16ed575f80ed79bf1c097aada3f305a97d87e560ba2625b630 not found: ID does not exist" containerID="e0dd321737e48f16ed575f80ed79bf1c097aada3f305a97d87e560ba2625b630" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.158399 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0dd321737e48f16ed575f80ed79bf1c097aada3f305a97d87e560ba2625b630"} err="failed to get container status \"e0dd321737e48f16ed575f80ed79bf1c097aada3f305a97d87e560ba2625b630\": rpc error: code = NotFound desc = could not find container \"e0dd321737e48f16ed575f80ed79bf1c097aada3f305a97d87e560ba2625b630\": container with ID starting with e0dd321737e48f16ed575f80ed79bf1c097aada3f305a97d87e560ba2625b630 not found: ID does not exist" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.158423 4755 scope.go:117] "RemoveContainer" containerID="f58e7c14089278c7505dfd86b431377b8cc004b8a482e5fdb5181df80828dbfc" Feb 02 22:56:01 crc kubenswrapper[4755]: E0202 22:56:01.159477 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f58e7c14089278c7505dfd86b431377b8cc004b8a482e5fdb5181df80828dbfc\": container with ID starting with f58e7c14089278c7505dfd86b431377b8cc004b8a482e5fdb5181df80828dbfc not found: ID does not exist" containerID="f58e7c14089278c7505dfd86b431377b8cc004b8a482e5fdb5181df80828dbfc" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.159666 4755 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"f58e7c14089278c7505dfd86b431377b8cc004b8a482e5fdb5181df80828dbfc"} err="failed to get container status \"f58e7c14089278c7505dfd86b431377b8cc004b8a482e5fdb5181df80828dbfc\": rpc error: code = NotFound desc = could not find container \"f58e7c14089278c7505dfd86b431377b8cc004b8a482e5fdb5181df80828dbfc\": container with ID starting with f58e7c14089278c7505dfd86b431377b8cc004b8a482e5fdb5181df80828dbfc not found: ID does not exist" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.159829 4755 scope.go:117] "RemoveContainer" containerID="e0dd321737e48f16ed575f80ed79bf1c097aada3f305a97d87e560ba2625b630" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.161911 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0dd321737e48f16ed575f80ed79bf1c097aada3f305a97d87e560ba2625b630"} err="failed to get container status \"e0dd321737e48f16ed575f80ed79bf1c097aada3f305a97d87e560ba2625b630\": rpc error: code = NotFound desc = could not find container \"e0dd321737e48f16ed575f80ed79bf1c097aada3f305a97d87e560ba2625b630\": container with ID starting with e0dd321737e48f16ed575f80ed79bf1c097aada3f305a97d87e560ba2625b630 not found: ID does not exist" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.161972 4755 scope.go:117] "RemoveContainer" containerID="f58e7c14089278c7505dfd86b431377b8cc004b8a482e5fdb5181df80828dbfc" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.162928 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f58e7c14089278c7505dfd86b431377b8cc004b8a482e5fdb5181df80828dbfc"} err="failed to get container status \"f58e7c14089278c7505dfd86b431377b8cc004b8a482e5fdb5181df80828dbfc\": rpc error: code = NotFound desc = could not find container \"f58e7c14089278c7505dfd86b431377b8cc004b8a482e5fdb5181df80828dbfc\": container with ID starting with f58e7c14089278c7505dfd86b431377b8cc004b8a482e5fdb5181df80828dbfc not found: ID does not exist" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.245964 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.299227 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/44652adb-e3d2-43f1-91f3-1bc04e42a97d-logs\") pod \"nova-metadata-0\" (UID: \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\") " pod="openstack/nova-metadata-0" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.299271 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/44652adb-e3d2-43f1-91f3-1bc04e42a97d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\") " pod="openstack/nova-metadata-0" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.299301 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44652adb-e3d2-43f1-91f3-1bc04e42a97d-config-data\") pod \"nova-metadata-0\" (UID: \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\") " pod="openstack/nova-metadata-0" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.299744 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7t85\" (UniqueName: 
\"kubernetes.io/projected/44652adb-e3d2-43f1-91f3-1bc04e42a97d-kube-api-access-k7t85\") pod \"nova-metadata-0\" (UID: \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\") " pod="openstack/nova-metadata-0" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.299891 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44652adb-e3d2-43f1-91f3-1bc04e42a97d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\") " pod="openstack/nova-metadata-0" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.401958 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7t85\" (UniqueName: \"kubernetes.io/projected/44652adb-e3d2-43f1-91f3-1bc04e42a97d-kube-api-access-k7t85\") pod \"nova-metadata-0\" (UID: \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\") " pod="openstack/nova-metadata-0" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.402054 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44652adb-e3d2-43f1-91f3-1bc04e42a97d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\") " pod="openstack/nova-metadata-0" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.402100 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/44652adb-e3d2-43f1-91f3-1bc04e42a97d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\") " pod="openstack/nova-metadata-0" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.402122 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/44652adb-e3d2-43f1-91f3-1bc04e42a97d-logs\") pod \"nova-metadata-0\" (UID: \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\") " pod="openstack/nova-metadata-0" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.402163 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44652adb-e3d2-43f1-91f3-1bc04e42a97d-config-data\") pod \"nova-metadata-0\" (UID: \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\") " pod="openstack/nova-metadata-0" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.403108 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/44652adb-e3d2-43f1-91f3-1bc04e42a97d-logs\") pod \"nova-metadata-0\" (UID: \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\") " pod="openstack/nova-metadata-0" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.407108 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/44652adb-e3d2-43f1-91f3-1bc04e42a97d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\") " pod="openstack/nova-metadata-0" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.407451 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44652adb-e3d2-43f1-91f3-1bc04e42a97d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\") " pod="openstack/nova-metadata-0" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.414560 4755 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44652adb-e3d2-43f1-91f3-1bc04e42a97d-config-data\") pod \"nova-metadata-0\" (UID: \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\") " pod="openstack/nova-metadata-0" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.420978 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7t85\" (UniqueName: \"kubernetes.io/projected/44652adb-e3d2-43f1-91f3-1bc04e42a97d-kube-api-access-k7t85\") pod \"nova-metadata-0\" (UID: \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\") " pod="openstack/nova-metadata-0" Feb 02 22:56:01 crc kubenswrapper[4755]: I0202 22:56:01.539826 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 22:56:02 crc kubenswrapper[4755]: I0202 22:56:02.110599 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 22:56:03 crc kubenswrapper[4755]: I0202 22:56:03.082575 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21a6a4af-4126-48d2-b839-b0d9e5ec3fbb" path="/var/lib/kubelet/pods/21a6a4af-4126-48d2-b839-b0d9e5ec3fbb/volumes" Feb 02 22:56:03 crc kubenswrapper[4755]: I0202 22:56:03.097961 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"44652adb-e3d2-43f1-91f3-1bc04e42a97d","Type":"ContainerStarted","Data":"66edd9f9fab7c6e774d0e9ae2c826e2c7876989a34037dbb382dcfd6249959f7"} Feb 02 22:56:03 crc kubenswrapper[4755]: I0202 22:56:03.098016 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"44652adb-e3d2-43f1-91f3-1bc04e42a97d","Type":"ContainerStarted","Data":"be2e52f084addd742d847247687891b05600c86e73e16f5256685b9b055210f4"} Feb 02 22:56:03 crc kubenswrapper[4755]: I0202 22:56:03.098035 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"44652adb-e3d2-43f1-91f3-1bc04e42a97d","Type":"ContainerStarted","Data":"d86f494743fe0ba59d9fce30570a392e901f6a05c2e6aa8b61f09f51d0b9aa58"} Feb 02 22:56:03 crc kubenswrapper[4755]: I0202 22:56:03.100218 4755 generic.go:334] "Generic (PLEG): container finished" podID="98b3b96f-3477-4af7-b598-a1d45e6ffbc6" containerID="779ec7c3d27a9ddd6a055ed372ce6dcca6fe056357b54ffcb54ffb237d193cea" exitCode=0 Feb 02 22:56:03 crc kubenswrapper[4755]: I0202 22:56:03.100260 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-8cxf8" event={"ID":"98b3b96f-3477-4af7-b598-a1d45e6ffbc6","Type":"ContainerDied","Data":"779ec7c3d27a9ddd6a055ed372ce6dcca6fe056357b54ffcb54ffb237d193cea"} Feb 02 22:56:03 crc kubenswrapper[4755]: I0202 22:56:03.123386 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.123369475 podStartE2EDuration="2.123369475s" podCreationTimestamp="2026-02-02 22:56:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:56:03.116870003 +0000 UTC m=+1318.808090339" watchObservedRunningTime="2026-02-02 22:56:03.123369475 +0000 UTC m=+1318.814589801" Feb 02 22:56:03 crc kubenswrapper[4755]: I0202 22:56:03.289877 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 22:56:03 crc kubenswrapper[4755]: I0202 22:56:03.290362 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openstack/nova-api-0" Feb 02 22:56:03 crc kubenswrapper[4755]: I0202 22:56:03.490758 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:03 crc kubenswrapper[4755]: I0202 22:56:03.571042 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 02 22:56:03 crc kubenswrapper[4755]: I0202 22:56:03.571368 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 02 22:56:03 crc kubenswrapper[4755]: I0202 22:56:03.605413 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 02 22:56:03 crc kubenswrapper[4755]: I0202 22:56:03.902004 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n" Feb 02 22:56:03 crc kubenswrapper[4755]: I0202 22:56:03.973514 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58bd69657f-l4lch"] Feb 02 22:56:03 crc kubenswrapper[4755]: I0202 22:56:03.973801 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-58bd69657f-l4lch" podUID="344ddf7e-967c-4278-90f8-92951d46fa13" containerName="dnsmasq-dns" containerID="cri-o://8bbbd80966d4299b6bfdf7603df94c32541b157ee7a997feac3dbae41d6e13bc" gracePeriod=10 Feb 02 22:56:04 crc kubenswrapper[4755]: I0202 22:56:04.139113 4755 generic.go:334] "Generic (PLEG): container finished" podID="344ddf7e-967c-4278-90f8-92951d46fa13" containerID="8bbbd80966d4299b6bfdf7603df94c32541b157ee7a997feac3dbae41d6e13bc" exitCode=0 Feb 02 22:56:04 crc kubenswrapper[4755]: I0202 22:56:04.139497 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58bd69657f-l4lch" event={"ID":"344ddf7e-967c-4278-90f8-92951d46fa13","Type":"ContainerDied","Data":"8bbbd80966d4299b6bfdf7603df94c32541b157ee7a997feac3dbae41d6e13bc"} Feb 02 22:56:04 crc kubenswrapper[4755]: I0202 22:56:04.177963 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 02 22:56:04 crc kubenswrapper[4755]: I0202 22:56:04.371929 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="becf5f49-d63e-4f4f-ae19-22cc57440465" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.215:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 02 22:56:04 crc kubenswrapper[4755]: I0202 22:56:04.372016 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="becf5f49-d63e-4f4f-ae19-22cc57440465" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.215:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 02 22:56:04 crc kubenswrapper[4755]: I0202 22:56:04.853999 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58bd69657f-l4lch" Feb 02 22:56:04 crc kubenswrapper[4755]: I0202 22:56:04.857812 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-8cxf8" Feb 02 22:56:04 crc kubenswrapper[4755]: I0202 22:56:04.881631 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vkpw2\" (UniqueName: \"kubernetes.io/projected/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-kube-api-access-vkpw2\") pod \"98b3b96f-3477-4af7-b598-a1d45e6ffbc6\" (UID: \"98b3b96f-3477-4af7-b598-a1d45e6ffbc6\") " Feb 02 22:56:04 crc kubenswrapper[4755]: I0202 22:56:04.881960 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-config-data\") pod \"98b3b96f-3477-4af7-b598-a1d45e6ffbc6\" (UID: \"98b3b96f-3477-4af7-b598-a1d45e6ffbc6\") " Feb 02 22:56:04 crc kubenswrapper[4755]: I0202 22:56:04.882029 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-scripts\") pod \"98b3b96f-3477-4af7-b598-a1d45e6ffbc6\" (UID: \"98b3b96f-3477-4af7-b598-a1d45e6ffbc6\") " Feb 02 22:56:04 crc kubenswrapper[4755]: I0202 22:56:04.882073 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fx2xw\" (UniqueName: \"kubernetes.io/projected/344ddf7e-967c-4278-90f8-92951d46fa13-kube-api-access-fx2xw\") pod \"344ddf7e-967c-4278-90f8-92951d46fa13\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " Feb 02 22:56:04 crc kubenswrapper[4755]: I0202 22:56:04.882107 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-ovsdbserver-nb\") pod \"344ddf7e-967c-4278-90f8-92951d46fa13\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " Feb 02 22:56:04 crc kubenswrapper[4755]: I0202 22:56:04.882174 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-dns-svc\") pod \"344ddf7e-967c-4278-90f8-92951d46fa13\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " Feb 02 22:56:04 crc kubenswrapper[4755]: I0202 22:56:04.882201 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-config\") pod \"344ddf7e-967c-4278-90f8-92951d46fa13\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " Feb 02 22:56:04 crc kubenswrapper[4755]: I0202 22:56:04.882567 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-ovsdbserver-sb\") pod \"344ddf7e-967c-4278-90f8-92951d46fa13\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " Feb 02 22:56:04 crc kubenswrapper[4755]: I0202 22:56:04.882633 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-dns-swift-storage-0\") pod \"344ddf7e-967c-4278-90f8-92951d46fa13\" (UID: \"344ddf7e-967c-4278-90f8-92951d46fa13\") " Feb 02 22:56:04 crc kubenswrapper[4755]: I0202 22:56:04.882678 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-combined-ca-bundle\") pod \"98b3b96f-3477-4af7-b598-a1d45e6ffbc6\" (UID: 
\"98b3b96f-3477-4af7-b598-a1d45e6ffbc6\") " Feb 02 22:56:04 crc kubenswrapper[4755]: I0202 22:56:04.891267 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-scripts" (OuterVolumeSpecName: "scripts") pod "98b3b96f-3477-4af7-b598-a1d45e6ffbc6" (UID: "98b3b96f-3477-4af7-b598-a1d45e6ffbc6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:04 crc kubenswrapper[4755]: I0202 22:56:04.891357 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-kube-api-access-vkpw2" (OuterVolumeSpecName: "kube-api-access-vkpw2") pod "98b3b96f-3477-4af7-b598-a1d45e6ffbc6" (UID: "98b3b96f-3477-4af7-b598-a1d45e6ffbc6"). InnerVolumeSpecName "kube-api-access-vkpw2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:56:04 crc kubenswrapper[4755]: I0202 22:56:04.901932 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/344ddf7e-967c-4278-90f8-92951d46fa13-kube-api-access-fx2xw" (OuterVolumeSpecName: "kube-api-access-fx2xw") pod "344ddf7e-967c-4278-90f8-92951d46fa13" (UID: "344ddf7e-967c-4278-90f8-92951d46fa13"). InnerVolumeSpecName "kube-api-access-fx2xw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:56:04 crc kubenswrapper[4755]: I0202 22:56:04.939712 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "98b3b96f-3477-4af7-b598-a1d45e6ffbc6" (UID: "98b3b96f-3477-4af7-b598-a1d45e6ffbc6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:04 crc kubenswrapper[4755]: I0202 22:56:04.955150 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-config-data" (OuterVolumeSpecName: "config-data") pod "98b3b96f-3477-4af7-b598-a1d45e6ffbc6" (UID: "98b3b96f-3477-4af7-b598-a1d45e6ffbc6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:04 crc kubenswrapper[4755]: I0202 22:56:04.970972 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "344ddf7e-967c-4278-90f8-92951d46fa13" (UID: "344ddf7e-967c-4278-90f8-92951d46fa13"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:04.996664 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "344ddf7e-967c-4278-90f8-92951d46fa13" (UID: "344ddf7e-967c-4278-90f8-92951d46fa13"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.000688 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "344ddf7e-967c-4278-90f8-92951d46fa13" (UID: "344ddf7e-967c-4278-90f8-92951d46fa13"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.000740 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.000812 4755 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.000827 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.000874 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vkpw2\" (UniqueName: \"kubernetes.io/projected/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-kube-api-access-vkpw2\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.000891 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.000903 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/98b3b96f-3477-4af7-b598-a1d45e6ffbc6-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.000922 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fx2xw\" (UniqueName: \"kubernetes.io/projected/344ddf7e-967c-4278-90f8-92951d46fa13-kube-api-access-fx2xw\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.018067 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "344ddf7e-967c-4278-90f8-92951d46fa13" (UID: "344ddf7e-967c-4278-90f8-92951d46fa13"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.033094 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-config" (OuterVolumeSpecName: "config") pod "344ddf7e-967c-4278-90f8-92951d46fa13" (UID: "344ddf7e-967c-4278-90f8-92951d46fa13"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.041704 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.102875 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.102911 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.102924 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/344ddf7e-967c-4278-90f8-92951d46fa13-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.202155 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58bd69657f-l4lch" Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.202164 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58bd69657f-l4lch" event={"ID":"344ddf7e-967c-4278-90f8-92951d46fa13","Type":"ContainerDied","Data":"ff05c844888d58ee4a1ef9ef60019f658f772f2e5623d0249fde2c44dee82cfe"} Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.202215 4755 scope.go:117] "RemoveContainer" containerID="8bbbd80966d4299b6bfdf7603df94c32541b157ee7a997feac3dbae41d6e13bc" Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.238405 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-8cxf8" event={"ID":"98b3b96f-3477-4af7-b598-a1d45e6ffbc6","Type":"ContainerDied","Data":"d95ab7dc06c953ee3ce716eb32c8676789820c28d325786cfbdb76e6db24fe4a"} Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.238448 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d95ab7dc06c953ee3ce716eb32c8676789820c28d325786cfbdb76e6db24fe4a" Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.238546 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-8cxf8" Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.262235 4755 generic.go:334] "Generic (PLEG): container finished" podID="a2940cf3-bd51-4393-8e05-6af05f9131ad" containerID="2360ddbfc06549d7f0a9b865f800a78fc532f116e0c42aa4dad699141a4387ec" exitCode=0 Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.262848 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-rnblj" event={"ID":"a2940cf3-bd51-4393-8e05-6af05f9131ad","Type":"ContainerDied","Data":"2360ddbfc06549d7f0a9b865f800a78fc532f116e0c42aa4dad699141a4387ec"} Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.407187 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.407432 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="becf5f49-d63e-4f4f-ae19-22cc57440465" containerName="nova-api-log" containerID="cri-o://914e88491147ab79d5218f29c161a35d3c453cdef3bf2a35ac29d1a3c2e4012d" gracePeriod=30 Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.408250 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="becf5f49-d63e-4f4f-ae19-22cc57440465" containerName="nova-api-api" containerID="cri-o://dd199947ff94e8ac15285616d586e78e5d01bb2083b172aa07d290d095753058" gracePeriod=30 Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.456115 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.456345 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="44652adb-e3d2-43f1-91f3-1bc04e42a97d" containerName="nova-metadata-log" containerID="cri-o://be2e52f084addd742d847247687891b05600c86e73e16f5256685b9b055210f4" gracePeriod=30 Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.456838 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="44652adb-e3d2-43f1-91f3-1bc04e42a97d" containerName="nova-metadata-metadata" containerID="cri-o://66edd9f9fab7c6e774d0e9ae2c826e2c7876989a34037dbb382dcfd6249959f7" gracePeriod=30 Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.528935 4755 scope.go:117] "RemoveContainer" containerID="8af799ed80b84a7df7829f0d5e65b3e44801f260658f387450104ef818096a9c" Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.543338 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58bd69657f-l4lch"] Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.562649 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-58bd69657f-l4lch"] Feb 02 22:56:05 crc kubenswrapper[4755]: I0202 22:56:05.652624 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.065064 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.134134 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/44652adb-e3d2-43f1-91f3-1bc04e42a97d-nova-metadata-tls-certs\") pod \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\" (UID: \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\") " Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.134276 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44652adb-e3d2-43f1-91f3-1bc04e42a97d-config-data\") pod \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\" (UID: \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\") " Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.134329 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44652adb-e3d2-43f1-91f3-1bc04e42a97d-combined-ca-bundle\") pod \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\" (UID: \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\") " Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.134361 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/44652adb-e3d2-43f1-91f3-1bc04e42a97d-logs\") pod \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\" (UID: \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\") " Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.134403 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7t85\" (UniqueName: \"kubernetes.io/projected/44652adb-e3d2-43f1-91f3-1bc04e42a97d-kube-api-access-k7t85\") pod \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\" (UID: \"44652adb-e3d2-43f1-91f3-1bc04e42a97d\") " Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.136252 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44652adb-e3d2-43f1-91f3-1bc04e42a97d-logs" (OuterVolumeSpecName: "logs") pod "44652adb-e3d2-43f1-91f3-1bc04e42a97d" (UID: "44652adb-e3d2-43f1-91f3-1bc04e42a97d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.142362 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44652adb-e3d2-43f1-91f3-1bc04e42a97d-kube-api-access-k7t85" (OuterVolumeSpecName: "kube-api-access-k7t85") pod "44652adb-e3d2-43f1-91f3-1bc04e42a97d" (UID: "44652adb-e3d2-43f1-91f3-1bc04e42a97d"). InnerVolumeSpecName "kube-api-access-k7t85". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.168690 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44652adb-e3d2-43f1-91f3-1bc04e42a97d-config-data" (OuterVolumeSpecName: "config-data") pod "44652adb-e3d2-43f1-91f3-1bc04e42a97d" (UID: "44652adb-e3d2-43f1-91f3-1bc04e42a97d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.205777 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44652adb-e3d2-43f1-91f3-1bc04e42a97d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "44652adb-e3d2-43f1-91f3-1bc04e42a97d" (UID: "44652adb-e3d2-43f1-91f3-1bc04e42a97d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.236582 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44652adb-e3d2-43f1-91f3-1bc04e42a97d-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.236606 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44652adb-e3d2-43f1-91f3-1bc04e42a97d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.236620 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/44652adb-e3d2-43f1-91f3-1bc04e42a97d-logs\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.236630 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7t85\" (UniqueName: \"kubernetes.io/projected/44652adb-e3d2-43f1-91f3-1bc04e42a97d-kube-api-access-k7t85\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.256862 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44652adb-e3d2-43f1-91f3-1bc04e42a97d-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "44652adb-e3d2-43f1-91f3-1bc04e42a97d" (UID: "44652adb-e3d2-43f1-91f3-1bc04e42a97d"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.300244 4755 generic.go:334] "Generic (PLEG): container finished" podID="44652adb-e3d2-43f1-91f3-1bc04e42a97d" containerID="66edd9f9fab7c6e774d0e9ae2c826e2c7876989a34037dbb382dcfd6249959f7" exitCode=0 Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.300277 4755 generic.go:334] "Generic (PLEG): container finished" podID="44652adb-e3d2-43f1-91f3-1bc04e42a97d" containerID="be2e52f084addd742d847247687891b05600c86e73e16f5256685b9b055210f4" exitCode=143 Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.300321 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"44652adb-e3d2-43f1-91f3-1bc04e42a97d","Type":"ContainerDied","Data":"66edd9f9fab7c6e774d0e9ae2c826e2c7876989a34037dbb382dcfd6249959f7"} Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.300348 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"44652adb-e3d2-43f1-91f3-1bc04e42a97d","Type":"ContainerDied","Data":"be2e52f084addd742d847247687891b05600c86e73e16f5256685b9b055210f4"} Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.300358 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"44652adb-e3d2-43f1-91f3-1bc04e42a97d","Type":"ContainerDied","Data":"d86f494743fe0ba59d9fce30570a392e901f6a05c2e6aa8b61f09f51d0b9aa58"} Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.300373 4755 scope.go:117] "RemoveContainer" containerID="66edd9f9fab7c6e774d0e9ae2c826e2c7876989a34037dbb382dcfd6249959f7" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.300472 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.309034 4755 generic.go:334] "Generic (PLEG): container finished" podID="becf5f49-d63e-4f4f-ae19-22cc57440465" containerID="914e88491147ab79d5218f29c161a35d3c453cdef3bf2a35ac29d1a3c2e4012d" exitCode=143 Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.309218 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"becf5f49-d63e-4f4f-ae19-22cc57440465","Type":"ContainerDied","Data":"914e88491147ab79d5218f29c161a35d3c453cdef3bf2a35ac29d1a3c2e4012d"} Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.339289 4755 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/44652adb-e3d2-43f1-91f3-1bc04e42a97d-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.379958 4755 scope.go:117] "RemoveContainer" containerID="be2e52f084addd742d847247687891b05600c86e73e16f5256685b9b055210f4" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.381769 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.402254 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.414566 4755 scope.go:117] "RemoveContainer" containerID="66edd9f9fab7c6e774d0e9ae2c826e2c7876989a34037dbb382dcfd6249959f7" Feb 02 22:56:06 crc kubenswrapper[4755]: E0202 22:56:06.416022 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66edd9f9fab7c6e774d0e9ae2c826e2c7876989a34037dbb382dcfd6249959f7\": container with ID starting with 66edd9f9fab7c6e774d0e9ae2c826e2c7876989a34037dbb382dcfd6249959f7 not found: ID does not exist" containerID="66edd9f9fab7c6e774d0e9ae2c826e2c7876989a34037dbb382dcfd6249959f7" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.416719 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66edd9f9fab7c6e774d0e9ae2c826e2c7876989a34037dbb382dcfd6249959f7"} err="failed to get container status \"66edd9f9fab7c6e774d0e9ae2c826e2c7876989a34037dbb382dcfd6249959f7\": rpc error: code = NotFound desc = could not find container \"66edd9f9fab7c6e774d0e9ae2c826e2c7876989a34037dbb382dcfd6249959f7\": container with ID starting with 66edd9f9fab7c6e774d0e9ae2c826e2c7876989a34037dbb382dcfd6249959f7 not found: ID does not exist" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.416822 4755 scope.go:117] "RemoveContainer" containerID="be2e52f084addd742d847247687891b05600c86e73e16f5256685b9b055210f4" Feb 02 22:56:06 crc kubenswrapper[4755]: E0202 22:56:06.417327 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be2e52f084addd742d847247687891b05600c86e73e16f5256685b9b055210f4\": container with ID starting with be2e52f084addd742d847247687891b05600c86e73e16f5256685b9b055210f4 not found: ID does not exist" containerID="be2e52f084addd742d847247687891b05600c86e73e16f5256685b9b055210f4" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.417444 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be2e52f084addd742d847247687891b05600c86e73e16f5256685b9b055210f4"} err="failed to get container status 
\"be2e52f084addd742d847247687891b05600c86e73e16f5256685b9b055210f4\": rpc error: code = NotFound desc = could not find container \"be2e52f084addd742d847247687891b05600c86e73e16f5256685b9b055210f4\": container with ID starting with be2e52f084addd742d847247687891b05600c86e73e16f5256685b9b055210f4 not found: ID does not exist" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.417458 4755 scope.go:117] "RemoveContainer" containerID="66edd9f9fab7c6e774d0e9ae2c826e2c7876989a34037dbb382dcfd6249959f7" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.417933 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66edd9f9fab7c6e774d0e9ae2c826e2c7876989a34037dbb382dcfd6249959f7"} err="failed to get container status \"66edd9f9fab7c6e774d0e9ae2c826e2c7876989a34037dbb382dcfd6249959f7\": rpc error: code = NotFound desc = could not find container \"66edd9f9fab7c6e774d0e9ae2c826e2c7876989a34037dbb382dcfd6249959f7\": container with ID starting with 66edd9f9fab7c6e774d0e9ae2c826e2c7876989a34037dbb382dcfd6249959f7 not found: ID does not exist" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.417952 4755 scope.go:117] "RemoveContainer" containerID="be2e52f084addd742d847247687891b05600c86e73e16f5256685b9b055210f4" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.427830 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be2e52f084addd742d847247687891b05600c86e73e16f5256685b9b055210f4"} err="failed to get container status \"be2e52f084addd742d847247687891b05600c86e73e16f5256685b9b055210f4\": rpc error: code = NotFound desc = could not find container \"be2e52f084addd742d847247687891b05600c86e73e16f5256685b9b055210f4\": container with ID starting with be2e52f084addd742d847247687891b05600c86e73e16f5256685b9b055210f4 not found: ID does not exist" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.432802 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 02 22:56:06 crc kubenswrapper[4755]: E0202 22:56:06.433208 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="344ddf7e-967c-4278-90f8-92951d46fa13" containerName="dnsmasq-dns" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.433224 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="344ddf7e-967c-4278-90f8-92951d46fa13" containerName="dnsmasq-dns" Feb 02 22:56:06 crc kubenswrapper[4755]: E0202 22:56:06.433246 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44652adb-e3d2-43f1-91f3-1bc04e42a97d" containerName="nova-metadata-metadata" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.433253 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="44652adb-e3d2-43f1-91f3-1bc04e42a97d" containerName="nova-metadata-metadata" Feb 02 22:56:06 crc kubenswrapper[4755]: E0202 22:56:06.433276 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44652adb-e3d2-43f1-91f3-1bc04e42a97d" containerName="nova-metadata-log" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.433283 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="44652adb-e3d2-43f1-91f3-1bc04e42a97d" containerName="nova-metadata-log" Feb 02 22:56:06 crc kubenswrapper[4755]: E0202 22:56:06.433295 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="344ddf7e-967c-4278-90f8-92951d46fa13" containerName="init" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.433301 4755 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="344ddf7e-967c-4278-90f8-92951d46fa13" containerName="init" Feb 02 22:56:06 crc kubenswrapper[4755]: E0202 22:56:06.433317 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98b3b96f-3477-4af7-b598-a1d45e6ffbc6" containerName="nova-manage" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.433323 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="98b3b96f-3477-4af7-b598-a1d45e6ffbc6" containerName="nova-manage" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.433516 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="44652adb-e3d2-43f1-91f3-1bc04e42a97d" containerName="nova-metadata-metadata" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.433530 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="44652adb-e3d2-43f1-91f3-1bc04e42a97d" containerName="nova-metadata-log" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.433538 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="98b3b96f-3477-4af7-b598-a1d45e6ffbc6" containerName="nova-manage" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.433546 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="344ddf7e-967c-4278-90f8-92951d46fa13" containerName="dnsmasq-dns" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.434564 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.439097 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.439282 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.457232 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.556198 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-logs\") pod \"nova-metadata-0\" (UID: \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\") " pod="openstack/nova-metadata-0" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.556405 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-config-data\") pod \"nova-metadata-0\" (UID: \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\") " pod="openstack/nova-metadata-0" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.556452 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\") " pod="openstack/nova-metadata-0" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.556485 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\") " pod="openstack/nova-metadata-0" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.556538 4755 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q49ld\" (UniqueName: \"kubernetes.io/projected/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-kube-api-access-q49ld\") pod \"nova-metadata-0\" (UID: \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\") " pod="openstack/nova-metadata-0" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.658285 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q49ld\" (UniqueName: \"kubernetes.io/projected/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-kube-api-access-q49ld\") pod \"nova-metadata-0\" (UID: \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\") " pod="openstack/nova-metadata-0" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.658829 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-logs\") pod \"nova-metadata-0\" (UID: \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\") " pod="openstack/nova-metadata-0" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.659078 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-config-data\") pod \"nova-metadata-0\" (UID: \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\") " pod="openstack/nova-metadata-0" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.659183 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\") " pod="openstack/nova-metadata-0" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.659286 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\") " pod="openstack/nova-metadata-0" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.660366 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-logs\") pod \"nova-metadata-0\" (UID: \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\") " pod="openstack/nova-metadata-0" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.667640 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\") " pod="openstack/nova-metadata-0" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.667955 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\") " pod="openstack/nova-metadata-0" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.671262 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-config-data\") pod \"nova-metadata-0\" (UID: \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\") " pod="openstack/nova-metadata-0" Feb 02 22:56:06 crc 
kubenswrapper[4755]: I0202 22:56:06.676329 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q49ld\" (UniqueName: \"kubernetes.io/projected/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-kube-api-access-q49ld\") pod \"nova-metadata-0\" (UID: \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\") " pod="openstack/nova-metadata-0" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.756440 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-rnblj" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.757675 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.867361 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2940cf3-bd51-4393-8e05-6af05f9131ad-scripts\") pod \"a2940cf3-bd51-4393-8e05-6af05f9131ad\" (UID: \"a2940cf3-bd51-4393-8e05-6af05f9131ad\") " Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.867464 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2940cf3-bd51-4393-8e05-6af05f9131ad-config-data\") pod \"a2940cf3-bd51-4393-8e05-6af05f9131ad\" (UID: \"a2940cf3-bd51-4393-8e05-6af05f9131ad\") " Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.867502 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2940cf3-bd51-4393-8e05-6af05f9131ad-combined-ca-bundle\") pod \"a2940cf3-bd51-4393-8e05-6af05f9131ad\" (UID: \"a2940cf3-bd51-4393-8e05-6af05f9131ad\") " Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.867561 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8wgr\" (UniqueName: \"kubernetes.io/projected/a2940cf3-bd51-4393-8e05-6af05f9131ad-kube-api-access-x8wgr\") pod \"a2940cf3-bd51-4393-8e05-6af05f9131ad\" (UID: \"a2940cf3-bd51-4393-8e05-6af05f9131ad\") " Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.874042 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2940cf3-bd51-4393-8e05-6af05f9131ad-scripts" (OuterVolumeSpecName: "scripts") pod "a2940cf3-bd51-4393-8e05-6af05f9131ad" (UID: "a2940cf3-bd51-4393-8e05-6af05f9131ad"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.874077 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2940cf3-bd51-4393-8e05-6af05f9131ad-kube-api-access-x8wgr" (OuterVolumeSpecName: "kube-api-access-x8wgr") pod "a2940cf3-bd51-4393-8e05-6af05f9131ad" (UID: "a2940cf3-bd51-4393-8e05-6af05f9131ad"). InnerVolumeSpecName "kube-api-access-x8wgr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.900073 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2940cf3-bd51-4393-8e05-6af05f9131ad-config-data" (OuterVolumeSpecName: "config-data") pod "a2940cf3-bd51-4393-8e05-6af05f9131ad" (UID: "a2940cf3-bd51-4393-8e05-6af05f9131ad"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.911903 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2940cf3-bd51-4393-8e05-6af05f9131ad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a2940cf3-bd51-4393-8e05-6af05f9131ad" (UID: "a2940cf3-bd51-4393-8e05-6af05f9131ad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.969746 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2940cf3-bd51-4393-8e05-6af05f9131ad-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.969779 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2940cf3-bd51-4393-8e05-6af05f9131ad-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.969788 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2940cf3-bd51-4393-8e05-6af05f9131ad-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:06 crc kubenswrapper[4755]: I0202 22:56:06.969799 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8wgr\" (UniqueName: \"kubernetes.io/projected/a2940cf3-bd51-4393-8e05-6af05f9131ad-kube-api-access-x8wgr\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.082212 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="344ddf7e-967c-4278-90f8-92951d46fa13" path="/var/lib/kubelet/pods/344ddf7e-967c-4278-90f8-92951d46fa13/volumes" Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.083043 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44652adb-e3d2-43f1-91f3-1bc04e42a97d" path="/var/lib/kubelet/pods/44652adb-e3d2-43f1-91f3-1bc04e42a97d/volumes" Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.251709 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.325065 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-rnblj" event={"ID":"a2940cf3-bd51-4393-8e05-6af05f9131ad","Type":"ContainerDied","Data":"b4cb424a0e25320190c7e97f0052b160b6836e6becb29d14988b9e4957c707f6"} Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.325459 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b4cb424a0e25320190c7e97f0052b160b6836e6becb29d14988b9e4957c707f6" Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.325525 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-rnblj" Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.337989 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="7634eaa3-85d9-49ba-b5c0-9190ef5d3f94" containerName="nova-scheduler-scheduler" containerID="cri-o://84c8d1de0cb2015043ea30ad860a76546f117c5265032b7f632d94368b5b06a9" gracePeriod=30 Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.341542 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6","Type":"ContainerStarted","Data":"87efb7a547a45db2d9f5ecbdb1044ad3b89c27784bde59fe02c7258830ac2289"} Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.411444 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 02 22:56:07 crc kubenswrapper[4755]: E0202 22:56:07.411943 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2940cf3-bd51-4393-8e05-6af05f9131ad" containerName="nova-cell1-conductor-db-sync" Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.411954 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2940cf3-bd51-4393-8e05-6af05f9131ad" containerName="nova-cell1-conductor-db-sync" Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.412147 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2940cf3-bd51-4393-8e05-6af05f9131ad" containerName="nova-cell1-conductor-db-sync" Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.412887 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.416524 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.433912 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.581889 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31b3bd9c-82ad-47a5-9275-cb6b3ea02256-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"31b3bd9c-82ad-47a5-9275-cb6b3ea02256\") " pod="openstack/nova-cell1-conductor-0" Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.582206 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsqn7\" (UniqueName: \"kubernetes.io/projected/31b3bd9c-82ad-47a5-9275-cb6b3ea02256-kube-api-access-fsqn7\") pod \"nova-cell1-conductor-0\" (UID: \"31b3bd9c-82ad-47a5-9275-cb6b3ea02256\") " pod="openstack/nova-cell1-conductor-0" Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.582239 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31b3bd9c-82ad-47a5-9275-cb6b3ea02256-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"31b3bd9c-82ad-47a5-9275-cb6b3ea02256\") " pod="openstack/nova-cell1-conductor-0" Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.686528 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31b3bd9c-82ad-47a5-9275-cb6b3ea02256-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: 
\"31b3bd9c-82ad-47a5-9275-cb6b3ea02256\") " pod="openstack/nova-cell1-conductor-0" Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.686611 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsqn7\" (UniqueName: \"kubernetes.io/projected/31b3bd9c-82ad-47a5-9275-cb6b3ea02256-kube-api-access-fsqn7\") pod \"nova-cell1-conductor-0\" (UID: \"31b3bd9c-82ad-47a5-9275-cb6b3ea02256\") " pod="openstack/nova-cell1-conductor-0" Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.686650 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31b3bd9c-82ad-47a5-9275-cb6b3ea02256-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"31b3bd9c-82ad-47a5-9275-cb6b3ea02256\") " pod="openstack/nova-cell1-conductor-0" Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.691289 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31b3bd9c-82ad-47a5-9275-cb6b3ea02256-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"31b3bd9c-82ad-47a5-9275-cb6b3ea02256\") " pod="openstack/nova-cell1-conductor-0" Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.692345 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31b3bd9c-82ad-47a5-9275-cb6b3ea02256-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"31b3bd9c-82ad-47a5-9275-cb6b3ea02256\") " pod="openstack/nova-cell1-conductor-0" Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.709497 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsqn7\" (UniqueName: \"kubernetes.io/projected/31b3bd9c-82ad-47a5-9275-cb6b3ea02256-kube-api-access-fsqn7\") pod \"nova-cell1-conductor-0\" (UID: \"31b3bd9c-82ad-47a5-9275-cb6b3ea02256\") " pod="openstack/nova-cell1-conductor-0" Feb 02 22:56:07 crc kubenswrapper[4755]: I0202 22:56:07.751296 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 02 22:56:08 crc kubenswrapper[4755]: I0202 22:56:08.346249 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 02 22:56:08 crc kubenswrapper[4755]: I0202 22:56:08.348952 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"31b3bd9c-82ad-47a5-9275-cb6b3ea02256","Type":"ContainerStarted","Data":"6e8c40902308259c2e01e08f1b4df922c7937f443da219d72c77cd8ec142042b"} Feb 02 22:56:08 crc kubenswrapper[4755]: I0202 22:56:08.351837 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6","Type":"ContainerStarted","Data":"7316a42b2c095cd11c736b2760f5f4866edbf63c1a5df1fc7145e44ca64bd1a6"} Feb 02 22:56:08 crc kubenswrapper[4755]: I0202 22:56:08.351861 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6","Type":"ContainerStarted","Data":"68c9dce7fd52a56c8b74a82b43ffca965bfe2e63bed9fb7e4fd1976b68e3a2ff"} Feb 02 22:56:08 crc kubenswrapper[4755]: I0202 22:56:08.397149 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.397125353 podStartE2EDuration="2.397125353s" podCreationTimestamp="2026-02-02 22:56:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:56:08.389291373 +0000 UTC m=+1324.080511699" watchObservedRunningTime="2026-02-02 22:56:08.397125353 +0000 UTC m=+1324.088345689" Feb 02 22:56:08 crc kubenswrapper[4755]: E0202 22:56:08.580701 4755 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="84c8d1de0cb2015043ea30ad860a76546f117c5265032b7f632d94368b5b06a9" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 02 22:56:08 crc kubenswrapper[4755]: E0202 22:56:08.600449 4755 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="84c8d1de0cb2015043ea30ad860a76546f117c5265032b7f632d94368b5b06a9" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 02 22:56:08 crc kubenswrapper[4755]: E0202 22:56:08.611218 4755 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="84c8d1de0cb2015043ea30ad860a76546f117c5265032b7f632d94368b5b06a9" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 02 22:56:08 crc kubenswrapper[4755]: E0202 22:56:08.611281 4755 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="7634eaa3-85d9-49ba-b5c0-9190ef5d3f94" containerName="nova-scheduler-scheduler" Feb 02 22:56:09 crc kubenswrapper[4755]: I0202 22:56:09.364459 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" 
event={"ID":"31b3bd9c-82ad-47a5-9275-cb6b3ea02256","Type":"ContainerStarted","Data":"86a68f708b8d5762d879becc3b10d063308bc0d8c9f0dcc56ff1562179c59173"} Feb 02 22:56:09 crc kubenswrapper[4755]: I0202 22:56:09.393346 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.393324798 podStartE2EDuration="2.393324798s" podCreationTimestamp="2026-02-02 22:56:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:56:09.386529137 +0000 UTC m=+1325.077749463" watchObservedRunningTime="2026-02-02 22:56:09.393324798 +0000 UTC m=+1325.084545134" Feb 02 22:56:09 crc kubenswrapper[4755]: I0202 22:56:09.990590 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 22:56:09 crc kubenswrapper[4755]: I0202 22:56:09.990799 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="50c1f741-a034-4f5f-8cb2-1e7c5df9a090" containerName="kube-state-metrics" containerID="cri-o://ac007e2e081ce2813bc19a9e76b1d7e19f9bcfa2074c4e1c1a164ae06fb5a922" gracePeriod=30 Feb 02 22:56:10 crc kubenswrapper[4755]: I0202 22:56:10.383743 4755 generic.go:334] "Generic (PLEG): container finished" podID="50c1f741-a034-4f5f-8cb2-1e7c5df9a090" containerID="ac007e2e081ce2813bc19a9e76b1d7e19f9bcfa2074c4e1c1a164ae06fb5a922" exitCode=2 Feb 02 22:56:10 crc kubenswrapper[4755]: I0202 22:56:10.384079 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"50c1f741-a034-4f5f-8cb2-1e7c5df9a090","Type":"ContainerDied","Data":"ac007e2e081ce2813bc19a9e76b1d7e19f9bcfa2074c4e1c1a164ae06fb5a922"} Feb 02 22:56:10 crc kubenswrapper[4755]: I0202 22:56:10.388525 4755 generic.go:334] "Generic (PLEG): container finished" podID="becf5f49-d63e-4f4f-ae19-22cc57440465" containerID="dd199947ff94e8ac15285616d586e78e5d01bb2083b172aa07d290d095753058" exitCode=0 Feb 02 22:56:10 crc kubenswrapper[4755]: I0202 22:56:10.389434 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"becf5f49-d63e-4f4f-ae19-22cc57440465","Type":"ContainerDied","Data":"dd199947ff94e8ac15285616d586e78e5d01bb2083b172aa07d290d095753058"} Feb 02 22:56:10 crc kubenswrapper[4755]: I0202 22:56:10.389470 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Feb 02 22:56:10 crc kubenswrapper[4755]: I0202 22:56:10.546492 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 22:56:10 crc kubenswrapper[4755]: I0202 22:56:10.555424 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 02 22:56:10 crc kubenswrapper[4755]: I0202 22:56:10.585777 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bklwn\" (UniqueName: \"kubernetes.io/projected/becf5f49-d63e-4f4f-ae19-22cc57440465-kube-api-access-bklwn\") pod \"becf5f49-d63e-4f4f-ae19-22cc57440465\" (UID: \"becf5f49-d63e-4f4f-ae19-22cc57440465\") " Feb 02 22:56:10 crc kubenswrapper[4755]: I0202 22:56:10.585914 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/becf5f49-d63e-4f4f-ae19-22cc57440465-logs\") pod \"becf5f49-d63e-4f4f-ae19-22cc57440465\" (UID: \"becf5f49-d63e-4f4f-ae19-22cc57440465\") " Feb 02 22:56:10 crc kubenswrapper[4755]: I0202 22:56:10.585950 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/becf5f49-d63e-4f4f-ae19-22cc57440465-config-data\") pod \"becf5f49-d63e-4f4f-ae19-22cc57440465\" (UID: \"becf5f49-d63e-4f4f-ae19-22cc57440465\") " Feb 02 22:56:10 crc kubenswrapper[4755]: I0202 22:56:10.585974 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/becf5f49-d63e-4f4f-ae19-22cc57440465-combined-ca-bundle\") pod \"becf5f49-d63e-4f4f-ae19-22cc57440465\" (UID: \"becf5f49-d63e-4f4f-ae19-22cc57440465\") " Feb 02 22:56:10 crc kubenswrapper[4755]: I0202 22:56:10.586007 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9bf7\" (UniqueName: \"kubernetes.io/projected/50c1f741-a034-4f5f-8cb2-1e7c5df9a090-kube-api-access-r9bf7\") pod \"50c1f741-a034-4f5f-8cb2-1e7c5df9a090\" (UID: \"50c1f741-a034-4f5f-8cb2-1e7c5df9a090\") " Feb 02 22:56:10 crc kubenswrapper[4755]: I0202 22:56:10.586402 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/becf5f49-d63e-4f4f-ae19-22cc57440465-logs" (OuterVolumeSpecName: "logs") pod "becf5f49-d63e-4f4f-ae19-22cc57440465" (UID: "becf5f49-d63e-4f4f-ae19-22cc57440465"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:56:10 crc kubenswrapper[4755]: I0202 22:56:10.587186 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/becf5f49-d63e-4f4f-ae19-22cc57440465-logs\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:10 crc kubenswrapper[4755]: I0202 22:56:10.591269 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/becf5f49-d63e-4f4f-ae19-22cc57440465-kube-api-access-bklwn" (OuterVolumeSpecName: "kube-api-access-bklwn") pod "becf5f49-d63e-4f4f-ae19-22cc57440465" (UID: "becf5f49-d63e-4f4f-ae19-22cc57440465"). InnerVolumeSpecName "kube-api-access-bklwn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:56:10 crc kubenswrapper[4755]: I0202 22:56:10.595653 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50c1f741-a034-4f5f-8cb2-1e7c5df9a090-kube-api-access-r9bf7" (OuterVolumeSpecName: "kube-api-access-r9bf7") pod "50c1f741-a034-4f5f-8cb2-1e7c5df9a090" (UID: "50c1f741-a034-4f5f-8cb2-1e7c5df9a090"). InnerVolumeSpecName "kube-api-access-r9bf7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:56:10 crc kubenswrapper[4755]: I0202 22:56:10.634013 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/becf5f49-d63e-4f4f-ae19-22cc57440465-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "becf5f49-d63e-4f4f-ae19-22cc57440465" (UID: "becf5f49-d63e-4f4f-ae19-22cc57440465"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:10 crc kubenswrapper[4755]: I0202 22:56:10.636573 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/becf5f49-d63e-4f4f-ae19-22cc57440465-config-data" (OuterVolumeSpecName: "config-data") pod "becf5f49-d63e-4f4f-ae19-22cc57440465" (UID: "becf5f49-d63e-4f4f-ae19-22cc57440465"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:10 crc kubenswrapper[4755]: I0202 22:56:10.690509 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bklwn\" (UniqueName: \"kubernetes.io/projected/becf5f49-d63e-4f4f-ae19-22cc57440465-kube-api-access-bklwn\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:10 crc kubenswrapper[4755]: I0202 22:56:10.690547 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/becf5f49-d63e-4f4f-ae19-22cc57440465-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:10 crc kubenswrapper[4755]: I0202 22:56:10.690558 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/becf5f49-d63e-4f4f-ae19-22cc57440465-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:10 crc kubenswrapper[4755]: I0202 22:56:10.690567 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9bf7\" (UniqueName: \"kubernetes.io/projected/50c1f741-a034-4f5f-8cb2-1e7c5df9a090-kube-api-access-r9bf7\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.168297 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.201288 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8bvl4\" (UniqueName: \"kubernetes.io/projected/7634eaa3-85d9-49ba-b5c0-9190ef5d3f94-kube-api-access-8bvl4\") pod \"7634eaa3-85d9-49ba-b5c0-9190ef5d3f94\" (UID: \"7634eaa3-85d9-49ba-b5c0-9190ef5d3f94\") " Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.201375 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7634eaa3-85d9-49ba-b5c0-9190ef5d3f94-combined-ca-bundle\") pod \"7634eaa3-85d9-49ba-b5c0-9190ef5d3f94\" (UID: \"7634eaa3-85d9-49ba-b5c0-9190ef5d3f94\") " Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.201481 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7634eaa3-85d9-49ba-b5c0-9190ef5d3f94-config-data\") pod \"7634eaa3-85d9-49ba-b5c0-9190ef5d3f94\" (UID: \"7634eaa3-85d9-49ba-b5c0-9190ef5d3f94\") " Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.208947 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7634eaa3-85d9-49ba-b5c0-9190ef5d3f94-kube-api-access-8bvl4" (OuterVolumeSpecName: "kube-api-access-8bvl4") pod "7634eaa3-85d9-49ba-b5c0-9190ef5d3f94" (UID: "7634eaa3-85d9-49ba-b5c0-9190ef5d3f94"). InnerVolumeSpecName "kube-api-access-8bvl4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.243509 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7634eaa3-85d9-49ba-b5c0-9190ef5d3f94-config-data" (OuterVolumeSpecName: "config-data") pod "7634eaa3-85d9-49ba-b5c0-9190ef5d3f94" (UID: "7634eaa3-85d9-49ba-b5c0-9190ef5d3f94"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.258898 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7634eaa3-85d9-49ba-b5c0-9190ef5d3f94-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7634eaa3-85d9-49ba-b5c0-9190ef5d3f94" (UID: "7634eaa3-85d9-49ba-b5c0-9190ef5d3f94"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.303668 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8bvl4\" (UniqueName: \"kubernetes.io/projected/7634eaa3-85d9-49ba-b5c0-9190ef5d3f94-kube-api-access-8bvl4\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.303717 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7634eaa3-85d9-49ba-b5c0-9190ef5d3f94-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.303738 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7634eaa3-85d9-49ba-b5c0-9190ef5d3f94-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.401291 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"becf5f49-d63e-4f4f-ae19-22cc57440465","Type":"ContainerDied","Data":"e5bc3d63229d2afacd6d0c9d63acc1c125984e661a83a3c2b3830689622e371b"} Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.401294 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.401353 4755 scope.go:117] "RemoveContainer" containerID="dd199947ff94e8ac15285616d586e78e5d01bb2083b172aa07d290d095753058" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.404515 4755 generic.go:334] "Generic (PLEG): container finished" podID="7634eaa3-85d9-49ba-b5c0-9190ef5d3f94" containerID="84c8d1de0cb2015043ea30ad860a76546f117c5265032b7f632d94368b5b06a9" exitCode=0 Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.404580 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7634eaa3-85d9-49ba-b5c0-9190ef5d3f94","Type":"ContainerDied","Data":"84c8d1de0cb2015043ea30ad860a76546f117c5265032b7f632d94368b5b06a9"} Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.404608 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7634eaa3-85d9-49ba-b5c0-9190ef5d3f94","Type":"ContainerDied","Data":"f0741912bdd0d169617238d7e50e7bbb27246829088774ec1078e4d50f1f8cd4"} Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.404657 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.407677 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.408023 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"50c1f741-a034-4f5f-8cb2-1e7c5df9a090","Type":"ContainerDied","Data":"02048ae52cef47d5e17968500d6274e063ce6e19bc65ba7d2c63c76f4092cce1"} Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.429709 4755 scope.go:117] "RemoveContainer" containerID="914e88491147ab79d5218f29c161a35d3c453cdef3bf2a35ac29d1a3c2e4012d" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.438931 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.449169 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.468922 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.484748 4755 scope.go:117] "RemoveContainer" containerID="84c8d1de0cb2015043ea30ad860a76546f117c5265032b7f632d94368b5b06a9" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.499785 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 02 22:56:11 crc kubenswrapper[4755]: E0202 22:56:11.500344 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7634eaa3-85d9-49ba-b5c0-9190ef5d3f94" containerName="nova-scheduler-scheduler" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.502011 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="7634eaa3-85d9-49ba-b5c0-9190ef5d3f94" containerName="nova-scheduler-scheduler" Feb 02 22:56:11 crc kubenswrapper[4755]: E0202 22:56:11.502041 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="becf5f49-d63e-4f4f-ae19-22cc57440465" containerName="nova-api-api" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.502051 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="becf5f49-d63e-4f4f-ae19-22cc57440465" containerName="nova-api-api" Feb 02 22:56:11 crc kubenswrapper[4755]: E0202 22:56:11.502063 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50c1f741-a034-4f5f-8cb2-1e7c5df9a090" containerName="kube-state-metrics" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.502071 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="50c1f741-a034-4f5f-8cb2-1e7c5df9a090" containerName="kube-state-metrics" Feb 02 22:56:11 crc kubenswrapper[4755]: E0202 22:56:11.502087 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="becf5f49-d63e-4f4f-ae19-22cc57440465" containerName="nova-api-log" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.502096 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="becf5f49-d63e-4f4f-ae19-22cc57440465" containerName="nova-api-log" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.502407 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="50c1f741-a034-4f5f-8cb2-1e7c5df9a090" containerName="kube-state-metrics" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.502431 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="becf5f49-d63e-4f4f-ae19-22cc57440465" containerName="nova-api-log" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.502457 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="7634eaa3-85d9-49ba-b5c0-9190ef5d3f94" containerName="nova-scheduler-scheduler" Feb 02 22:56:11 crc 
kubenswrapper[4755]: I0202 22:56:11.502468 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="becf5f49-d63e-4f4f-ae19-22cc57440465" containerName="nova-api-api" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.503906 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.507795 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.510945 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.537908 4755 scope.go:117] "RemoveContainer" containerID="84c8d1de0cb2015043ea30ad860a76546f117c5265032b7f632d94368b5b06a9" Feb 02 22:56:11 crc kubenswrapper[4755]: E0202 22:56:11.539457 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84c8d1de0cb2015043ea30ad860a76546f117c5265032b7f632d94368b5b06a9\": container with ID starting with 84c8d1de0cb2015043ea30ad860a76546f117c5265032b7f632d94368b5b06a9 not found: ID does not exist" containerID="84c8d1de0cb2015043ea30ad860a76546f117c5265032b7f632d94368b5b06a9" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.539489 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84c8d1de0cb2015043ea30ad860a76546f117c5265032b7f632d94368b5b06a9"} err="failed to get container status \"84c8d1de0cb2015043ea30ad860a76546f117c5265032b7f632d94368b5b06a9\": rpc error: code = NotFound desc = could not find container \"84c8d1de0cb2015043ea30ad860a76546f117c5265032b7f632d94368b5b06a9\": container with ID starting with 84c8d1de0cb2015043ea30ad860a76546f117c5265032b7f632d94368b5b06a9 not found: ID does not exist" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.539511 4755 scope.go:117] "RemoveContainer" containerID="ac007e2e081ce2813bc19a9e76b1d7e19f9bcfa2074c4e1c1a164ae06fb5a922" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.544277 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.564810 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.576215 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.616120 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-config-data\") pod \"nova-api-0\" (UID: \"56d2a1d2-2eee-4d5f-847e-aa26963be2bd\") " pod="openstack/nova-api-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.616228 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5m79f\" (UniqueName: \"kubernetes.io/projected/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-kube-api-access-5m79f\") pod \"nova-api-0\" (UID: \"56d2a1d2-2eee-4d5f-847e-aa26963be2bd\") " pod="openstack/nova-api-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.616275 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"56d2a1d2-2eee-4d5f-847e-aa26963be2bd\") " pod="openstack/nova-api-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.616302 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-logs\") pod \"nova-api-0\" (UID: \"56d2a1d2-2eee-4d5f-847e-aa26963be2bd\") " pod="openstack/nova-api-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.621863 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.639714 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.647680 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.655332 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.655347 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.668622 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.671079 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.682341 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.683165 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.724980 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd\") " pod="openstack/nova-scheduler-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.725222 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9629\" (UniqueName: \"kubernetes.io/projected/2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd-kube-api-access-q9629\") pod \"nova-scheduler-0\" (UID: \"2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd\") " pod="openstack/nova-scheduler-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.725322 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-config-data\") pod \"nova-api-0\" (UID: \"56d2a1d2-2eee-4d5f-847e-aa26963be2bd\") " pod="openstack/nova-api-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.725399 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13cf3041-fd62-4742-a4ea-73a9c6817f51-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"13cf3041-fd62-4742-a4ea-73a9c6817f51\") " pod="openstack/kube-state-metrics-0" Feb 02 22:56:11 crc 
kubenswrapper[4755]: I0202 22:56:11.725474 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/13cf3041-fd62-4742-a4ea-73a9c6817f51-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"13cf3041-fd62-4742-a4ea-73a9c6817f51\") " pod="openstack/kube-state-metrics-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.725505 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5m79f\" (UniqueName: \"kubernetes.io/projected/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-kube-api-access-5m79f\") pod \"nova-api-0\" (UID: \"56d2a1d2-2eee-4d5f-847e-aa26963be2bd\") " pod="openstack/nova-api-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.725557 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76qdk\" (UniqueName: \"kubernetes.io/projected/13cf3041-fd62-4742-a4ea-73a9c6817f51-kube-api-access-76qdk\") pod \"kube-state-metrics-0\" (UID: \"13cf3041-fd62-4742-a4ea-73a9c6817f51\") " pod="openstack/kube-state-metrics-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.725588 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/13cf3041-fd62-4742-a4ea-73a9c6817f51-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"13cf3041-fd62-4742-a4ea-73a9c6817f51\") " pod="openstack/kube-state-metrics-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.725640 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd-config-data\") pod \"nova-scheduler-0\" (UID: \"2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd\") " pod="openstack/nova-scheduler-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.725698 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"56d2a1d2-2eee-4d5f-847e-aa26963be2bd\") " pod="openstack/nova-api-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.725866 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-logs\") pod \"nova-api-0\" (UID: \"56d2a1d2-2eee-4d5f-847e-aa26963be2bd\") " pod="openstack/nova-api-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.726529 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-logs\") pod \"nova-api-0\" (UID: \"56d2a1d2-2eee-4d5f-847e-aa26963be2bd\") " pod="openstack/nova-api-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.734392 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"56d2a1d2-2eee-4d5f-847e-aa26963be2bd\") " pod="openstack/nova-api-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.736968 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-config-data\") pod \"nova-api-0\" (UID: \"56d2a1d2-2eee-4d5f-847e-aa26963be2bd\") " pod="openstack/nova-api-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.741118 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5m79f\" (UniqueName: \"kubernetes.io/projected/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-kube-api-access-5m79f\") pod \"nova-api-0\" (UID: \"56d2a1d2-2eee-4d5f-847e-aa26963be2bd\") " pod="openstack/nova-api-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.760609 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.761752 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.827910 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd\") " pod="openstack/nova-scheduler-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.827973 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9629\" (UniqueName: \"kubernetes.io/projected/2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd-kube-api-access-q9629\") pod \"nova-scheduler-0\" (UID: \"2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd\") " pod="openstack/nova-scheduler-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.828033 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13cf3041-fd62-4742-a4ea-73a9c6817f51-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"13cf3041-fd62-4742-a4ea-73a9c6817f51\") " pod="openstack/kube-state-metrics-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.828105 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/13cf3041-fd62-4742-a4ea-73a9c6817f51-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"13cf3041-fd62-4742-a4ea-73a9c6817f51\") " pod="openstack/kube-state-metrics-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.828132 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76qdk\" (UniqueName: \"kubernetes.io/projected/13cf3041-fd62-4742-a4ea-73a9c6817f51-kube-api-access-76qdk\") pod \"kube-state-metrics-0\" (UID: \"13cf3041-fd62-4742-a4ea-73a9c6817f51\") " pod="openstack/kube-state-metrics-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.828151 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/13cf3041-fd62-4742-a4ea-73a9c6817f51-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"13cf3041-fd62-4742-a4ea-73a9c6817f51\") " pod="openstack/kube-state-metrics-0" Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.828169 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd-config-data\") pod \"nova-scheduler-0\" (UID: \"2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd\") " pod="openstack/nova-scheduler-0" Feb 02 22:56:11 crc 
Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.828494 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.832998 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd-config-data\") pod \"nova-scheduler-0\" (UID: \"2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd\") " pod="openstack/nova-scheduler-0"
Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.833061 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/13cf3041-fd62-4742-a4ea-73a9c6817f51-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"13cf3041-fd62-4742-a4ea-73a9c6817f51\") " pod="openstack/kube-state-metrics-0"
Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.835446 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13cf3041-fd62-4742-a4ea-73a9c6817f51-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"13cf3041-fd62-4742-a4ea-73a9c6817f51\") " pod="openstack/kube-state-metrics-0"
Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.836193 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd\") " pod="openstack/nova-scheduler-0"
Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.836333 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/13cf3041-fd62-4742-a4ea-73a9c6817f51-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"13cf3041-fd62-4742-a4ea-73a9c6817f51\") " pod="openstack/kube-state-metrics-0"
Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.844701 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76qdk\" (UniqueName: \"kubernetes.io/projected/13cf3041-fd62-4742-a4ea-73a9c6817f51-kube-api-access-76qdk\") pod \"kube-state-metrics-0\" (UID: \"13cf3041-fd62-4742-a4ea-73a9c6817f51\") " pod="openstack/kube-state-metrics-0"
Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.844824 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9629\" (UniqueName: \"kubernetes.io/projected/2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd-kube-api-access-q9629\") pod \"nova-scheduler-0\" (UID: \"2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd\") " pod="openstack/nova-scheduler-0"
Feb 02 22:56:11 crc kubenswrapper[4755]: I0202 22:56:11.965360 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 22:56:12 crc kubenswrapper[4755]: I0202 22:56:12.292457 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 22:56:12 crc kubenswrapper[4755]: I0202 22:56:12.349366 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:56:12 crc kubenswrapper[4755]: I0202 22:56:12.352788 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3ec8849f-772e-43ac-97de-28bc9169a1b4" containerName="ceilometer-central-agent" containerID="cri-o://30b6a55eada77e771fb020e08e7dfbe3b5a58ff1fe0fb4806ecfe3a89c04dec1" gracePeriod=30 Feb 02 22:56:12 crc kubenswrapper[4755]: I0202 22:56:12.353217 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3ec8849f-772e-43ac-97de-28bc9169a1b4" containerName="proxy-httpd" containerID="cri-o://ce1a04046ba5b98ff52ef6409f02410d58218db03cdf0393209a055714c31ee0" gracePeriod=30 Feb 02 22:56:12 crc kubenswrapper[4755]: I0202 22:56:12.353276 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3ec8849f-772e-43ac-97de-28bc9169a1b4" containerName="sg-core" containerID="cri-o://3ff14072feabf6d64dcbbacef56e7e4b1e81eeda549adcbde8a78cf161418ae9" gracePeriod=30 Feb 02 22:56:12 crc kubenswrapper[4755]: I0202 22:56:12.353309 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3ec8849f-772e-43ac-97de-28bc9169a1b4" containerName="ceilometer-notification-agent" containerID="cri-o://e9ec323764d38671ec6dfd13409f793ebde8cdf1845ab329bfa049b4b9bfef38" gracePeriod=30 Feb 02 22:56:12 crc kubenswrapper[4755]: I0202 22:56:12.427940 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"56d2a1d2-2eee-4d5f-847e-aa26963be2bd","Type":"ContainerStarted","Data":"986260f9f4086911c90bddff4e2c2b33f70e634d213c55122a69ea91530e6f8d"} Feb 02 22:56:12 crc kubenswrapper[4755]: I0202 22:56:12.482709 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 02 22:56:12 crc kubenswrapper[4755]: I0202 22:56:12.720776 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.081805 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50c1f741-a034-4f5f-8cb2-1e7c5df9a090" path="/var/lib/kubelet/pods/50c1f741-a034-4f5f-8cb2-1e7c5df9a090/volumes" Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.083029 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7634eaa3-85d9-49ba-b5c0-9190ef5d3f94" path="/var/lib/kubelet/pods/7634eaa3-85d9-49ba-b5c0-9190ef5d3f94/volumes" Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.083852 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="becf5f49-d63e-4f4f-ae19-22cc57440465" path="/var/lib/kubelet/pods/becf5f49-d63e-4f4f-ae19-22cc57440465/volumes" Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.450791 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"56d2a1d2-2eee-4d5f-847e-aa26963be2bd","Type":"ContainerStarted","Data":"51274842ee3f7f98a5b34c6f0e0b077387b07bc7b2c2a1f513544e65c0912339"} Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.450838 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"56d2a1d2-2eee-4d5f-847e-aa26963be2bd","Type":"ContainerStarted","Data":"a0d1f3368d50cb125e775efe5caebd28c840209b328f3eef0acc55aab3f089c0"} Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.454812 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"13cf3041-fd62-4742-a4ea-73a9c6817f51","Type":"ContainerStarted","Data":"4da96a149a13315c13676e8d1986e06b21c95f67dc449b52fca1a1344e14873e"} Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.454860 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"13cf3041-fd62-4742-a4ea-73a9c6817f51","Type":"ContainerStarted","Data":"8da2645e7f85ca2fb2c79df94b0f0782e35f9ae85140042e64273c20c3250d18"} Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.455355 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.460079 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd","Type":"ContainerStarted","Data":"bff936ba8deed2e76ade4a1b7dba41ea5183a41d1aaf86076b3e869db72fc573"} Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.460109 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd","Type":"ContainerStarted","Data":"0c991f1a6017cd2f8baeabdb0056e3c5d20defe757ed99b5124b7aa87ded44f3"} Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.463654 4755 generic.go:334] "Generic (PLEG): container finished" podID="3ec8849f-772e-43ac-97de-28bc9169a1b4" containerID="ce1a04046ba5b98ff52ef6409f02410d58218db03cdf0393209a055714c31ee0" exitCode=0 Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.463699 4755 generic.go:334] "Generic (PLEG): container finished" podID="3ec8849f-772e-43ac-97de-28bc9169a1b4" containerID="3ff14072feabf6d64dcbbacef56e7e4b1e81eeda549adcbde8a78cf161418ae9" exitCode=2 Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.463709 4755 generic.go:334] "Generic (PLEG): container finished" podID="3ec8849f-772e-43ac-97de-28bc9169a1b4" containerID="e9ec323764d38671ec6dfd13409f793ebde8cdf1845ab329bfa049b4b9bfef38" exitCode=0 Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.463717 4755 generic.go:334] "Generic (PLEG): container finished" podID="3ec8849f-772e-43ac-97de-28bc9169a1b4" containerID="30b6a55eada77e771fb020e08e7dfbe3b5a58ff1fe0fb4806ecfe3a89c04dec1" exitCode=0 Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.463760 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3ec8849f-772e-43ac-97de-28bc9169a1b4","Type":"ContainerDied","Data":"ce1a04046ba5b98ff52ef6409f02410d58218db03cdf0393209a055714c31ee0"} Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.463781 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3ec8849f-772e-43ac-97de-28bc9169a1b4","Type":"ContainerDied","Data":"3ff14072feabf6d64dcbbacef56e7e4b1e81eeda549adcbde8a78cf161418ae9"} Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.463794 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3ec8849f-772e-43ac-97de-28bc9169a1b4","Type":"ContainerDied","Data":"e9ec323764d38671ec6dfd13409f793ebde8cdf1845ab329bfa049b4b9bfef38"} Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.463804 4755 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3ec8849f-772e-43ac-97de-28bc9169a1b4","Type":"ContainerDied","Data":"30b6a55eada77e771fb020e08e7dfbe3b5a58ff1fe0fb4806ecfe3a89c04dec1"} Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.473868 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.473853102 podStartE2EDuration="2.473853102s" podCreationTimestamp="2026-02-02 22:56:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:56:13.469483389 +0000 UTC m=+1329.160703715" watchObservedRunningTime="2026-02-02 22:56:13.473853102 +0000 UTC m=+1329.165073428" Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.490250 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.490235341 podStartE2EDuration="2.490235341s" podCreationTimestamp="2026-02-02 22:56:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:56:13.483905434 +0000 UTC m=+1329.175125760" watchObservedRunningTime="2026-02-02 22:56:13.490235341 +0000 UTC m=+1329.181455667" Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.505840 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.05936459 podStartE2EDuration="2.505819969s" podCreationTimestamp="2026-02-02 22:56:11 +0000 UTC" firstStartedPulling="2026-02-02 22:56:12.504043727 +0000 UTC m=+1328.195264053" lastFinishedPulling="2026-02-02 22:56:12.950499106 +0000 UTC m=+1328.641719432" observedRunningTime="2026-02-02 22:56:13.501829667 +0000 UTC m=+1329.193050013" watchObservedRunningTime="2026-02-02 22:56:13.505819969 +0000 UTC m=+1329.197040295" Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.677354 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.808495 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-sg-core-conf-yaml\") pod \"3ec8849f-772e-43ac-97de-28bc9169a1b4\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.808565 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-config-data\") pod \"3ec8849f-772e-43ac-97de-28bc9169a1b4\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.808635 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3ec8849f-772e-43ac-97de-28bc9169a1b4-log-httpd\") pod \"3ec8849f-772e-43ac-97de-28bc9169a1b4\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.808655 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-scripts\") pod \"3ec8849f-772e-43ac-97de-28bc9169a1b4\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.808768 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-combined-ca-bundle\") pod \"3ec8849f-772e-43ac-97de-28bc9169a1b4\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.808806 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gt6qn\" (UniqueName: \"kubernetes.io/projected/3ec8849f-772e-43ac-97de-28bc9169a1b4-kube-api-access-gt6qn\") pod \"3ec8849f-772e-43ac-97de-28bc9169a1b4\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.808874 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3ec8849f-772e-43ac-97de-28bc9169a1b4-run-httpd\") pod \"3ec8849f-772e-43ac-97de-28bc9169a1b4\" (UID: \"3ec8849f-772e-43ac-97de-28bc9169a1b4\") " Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.809618 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ec8849f-772e-43ac-97de-28bc9169a1b4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3ec8849f-772e-43ac-97de-28bc9169a1b4" (UID: "3ec8849f-772e-43ac-97de-28bc9169a1b4"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.811178 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ec8849f-772e-43ac-97de-28bc9169a1b4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3ec8849f-772e-43ac-97de-28bc9169a1b4" (UID: "3ec8849f-772e-43ac-97de-28bc9169a1b4"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.813201 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-scripts" (OuterVolumeSpecName: "scripts") pod "3ec8849f-772e-43ac-97de-28bc9169a1b4" (UID: "3ec8849f-772e-43ac-97de-28bc9169a1b4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.813917 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ec8849f-772e-43ac-97de-28bc9169a1b4-kube-api-access-gt6qn" (OuterVolumeSpecName: "kube-api-access-gt6qn") pod "3ec8849f-772e-43ac-97de-28bc9169a1b4" (UID: "3ec8849f-772e-43ac-97de-28bc9169a1b4"). InnerVolumeSpecName "kube-api-access-gt6qn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.836832 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3ec8849f-772e-43ac-97de-28bc9169a1b4" (UID: "3ec8849f-772e-43ac-97de-28bc9169a1b4"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.894381 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3ec8849f-772e-43ac-97de-28bc9169a1b4" (UID: "3ec8849f-772e-43ac-97de-28bc9169a1b4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.910880 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.910910 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gt6qn\" (UniqueName: \"kubernetes.io/projected/3ec8849f-772e-43ac-97de-28bc9169a1b4-kube-api-access-gt6qn\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.910920 4755 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3ec8849f-772e-43ac-97de-28bc9169a1b4-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.910928 4755 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.910936 4755 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3ec8849f-772e-43ac-97de-28bc9169a1b4-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.910944 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:13 crc kubenswrapper[4755]: I0202 22:56:13.919461 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-config-data" (OuterVolumeSpecName: "config-data") pod "3ec8849f-772e-43ac-97de-28bc9169a1b4" (UID: "3ec8849f-772e-43ac-97de-28bc9169a1b4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.012940 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ec8849f-772e-43ac-97de-28bc9169a1b4-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.474122 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3ec8849f-772e-43ac-97de-28bc9169a1b4","Type":"ContainerDied","Data":"f5e05ed044adc5259bae7751da2a3f3e30058981022538524ba1fcc8531fcf80"} Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.474184 4755 scope.go:117] "RemoveContainer" containerID="ce1a04046ba5b98ff52ef6409f02410d58218db03cdf0393209a055714c31ee0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.474205 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.503558 4755 scope.go:117] "RemoveContainer" containerID="3ff14072feabf6d64dcbbacef56e7e4b1e81eeda549adcbde8a78cf161418ae9" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.524450 4755 scope.go:117] "RemoveContainer" containerID="e9ec323764d38671ec6dfd13409f793ebde8cdf1845ab329bfa049b4b9bfef38" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.527701 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.570354 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.588112 4755 scope.go:117] "RemoveContainer" containerID="30b6a55eada77e771fb020e08e7dfbe3b5a58ff1fe0fb4806ecfe3a89c04dec1" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.591343 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:56:14 crc kubenswrapper[4755]: E0202 22:56:14.591922 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ec8849f-772e-43ac-97de-28bc9169a1b4" containerName="sg-core" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.591948 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ec8849f-772e-43ac-97de-28bc9169a1b4" containerName="sg-core" Feb 02 22:56:14 crc kubenswrapper[4755]: E0202 22:56:14.591964 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ec8849f-772e-43ac-97de-28bc9169a1b4" containerName="ceilometer-notification-agent" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.591976 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ec8849f-772e-43ac-97de-28bc9169a1b4" containerName="ceilometer-notification-agent" Feb 02 22:56:14 crc kubenswrapper[4755]: E0202 22:56:14.592013 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ec8849f-772e-43ac-97de-28bc9169a1b4" containerName="proxy-httpd" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.592023 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ec8849f-772e-43ac-97de-28bc9169a1b4" containerName="proxy-httpd" Feb 02 22:56:14 crc kubenswrapper[4755]: E0202 22:56:14.592039 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ec8849f-772e-43ac-97de-28bc9169a1b4" 
containerName="ceilometer-central-agent" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.592047 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ec8849f-772e-43ac-97de-28bc9169a1b4" containerName="ceilometer-central-agent" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.592308 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ec8849f-772e-43ac-97de-28bc9169a1b4" containerName="proxy-httpd" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.592332 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ec8849f-772e-43ac-97de-28bc9169a1b4" containerName="sg-core" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.592368 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ec8849f-772e-43ac-97de-28bc9169a1b4" containerName="ceilometer-notification-agent" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.592381 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ec8849f-772e-43ac-97de-28bc9169a1b4" containerName="ceilometer-central-agent" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.595191 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.598445 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.598689 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.598907 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.605381 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.727133 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-run-httpd\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.727184 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85szm\" (UniqueName: \"kubernetes.io/projected/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-kube-api-access-85szm\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.727211 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-config-data\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.727255 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.727272 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-log-httpd\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.727298 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.727350 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.727394 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-scripts\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.828742 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85szm\" (UniqueName: \"kubernetes.io/projected/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-kube-api-access-85szm\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.828803 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-config-data\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.828858 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.828875 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-log-httpd\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.828903 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.828968 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.829007 4755 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-scripts\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.829057 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-run-httpd\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.829482 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-log-httpd\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.829581 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-run-httpd\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.833507 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.833823 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-scripts\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.834000 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.835993 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-config-data\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.838148 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.852914 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85szm\" (UniqueName: \"kubernetes.io/projected/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-kube-api-access-85szm\") pod \"ceilometer-0\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " pod="openstack/ceilometer-0" Feb 02 22:56:14 crc kubenswrapper[4755]: I0202 22:56:14.924863 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:56:15 crc kubenswrapper[4755]: I0202 22:56:15.087766 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ec8849f-772e-43ac-97de-28bc9169a1b4" path="/var/lib/kubelet/pods/3ec8849f-772e-43ac-97de-28bc9169a1b4/volumes" Feb 02 22:56:15 crc kubenswrapper[4755]: I0202 22:56:15.414529 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:56:15 crc kubenswrapper[4755]: W0202 22:56:15.424217 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf6e9ccb5_10a6_4102_aff4_a8ef99bd696f.slice/crio-a40ef438ae98b3e66ebb78276c81509f38de896869ad423611659906b2fbefc2 WatchSource:0}: Error finding container a40ef438ae98b3e66ebb78276c81509f38de896869ad423611659906b2fbefc2: Status 404 returned error can't find the container with id a40ef438ae98b3e66ebb78276c81509f38de896869ad423611659906b2fbefc2 Feb 02 22:56:15 crc kubenswrapper[4755]: I0202 22:56:15.484016 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f","Type":"ContainerStarted","Data":"a40ef438ae98b3e66ebb78276c81509f38de896869ad423611659906b2fbefc2"} Feb 02 22:56:16 crc kubenswrapper[4755]: I0202 22:56:16.498428 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f","Type":"ContainerStarted","Data":"f39615e46f20fc6633e2402dcf69bc08d22d06a7453c46d6b9ee8d2ca646bd07"} Feb 02 22:56:16 crc kubenswrapper[4755]: I0202 22:56:16.760221 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 02 22:56:16 crc kubenswrapper[4755]: I0202 22:56:16.760284 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 02 22:56:16 crc kubenswrapper[4755]: I0202 22:56:16.998344 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 02 22:56:17 crc kubenswrapper[4755]: I0202 22:56:17.508441 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f","Type":"ContainerStarted","Data":"97083cc24a5705bb4ccb5ec780218093bcdb2817ed690bc6daa48c2f28383140"} Feb 02 22:56:17 crc kubenswrapper[4755]: I0202 22:56:17.776945 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="314d7c05-3b1b-4fe8-93f0-32ab7c761fe6" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.222:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 02 22:56:17 crc kubenswrapper[4755]: I0202 22:56:17.776999 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="314d7c05-3b1b-4fe8-93f0-32ab7c761fe6" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.222:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 02 22:56:17 crc kubenswrapper[4755]: I0202 22:56:17.785261 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Feb 02 22:56:18 crc kubenswrapper[4755]: I0202 22:56:18.523213 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f","Type":"ContainerStarted","Data":"04e4b70a4cb343808f6028756135ededdbaf9e856891761c656bba7ecc6cde34"} Feb 02 22:56:20 crc kubenswrapper[4755]: I0202 22:56:20.545158 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f","Type":"ContainerStarted","Data":"176df803f8c9856ab5ce939593e39185b1c449d4ef9f2e4a9dfb154da96ec2f1"} Feb 02 22:56:20 crc kubenswrapper[4755]: I0202 22:56:20.545819 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 22:56:20 crc kubenswrapper[4755]: I0202 22:56:20.585794 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.177768926 podStartE2EDuration="6.585771589s" podCreationTimestamp="2026-02-02 22:56:14 +0000 UTC" firstStartedPulling="2026-02-02 22:56:15.426921786 +0000 UTC m=+1331.118142112" lastFinishedPulling="2026-02-02 22:56:19.834924449 +0000 UTC m=+1335.526144775" observedRunningTime="2026-02-02 22:56:20.576221351 +0000 UTC m=+1336.267441677" watchObservedRunningTime="2026-02-02 22:56:20.585771589 +0000 UTC m=+1336.276991915" Feb 02 22:56:21 crc kubenswrapper[4755]: I0202 22:56:21.829511 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 22:56:21 crc kubenswrapper[4755]: I0202 22:56:21.829832 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 22:56:21 crc kubenswrapper[4755]: I0202 22:56:21.981189 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Feb 02 22:56:21 crc kubenswrapper[4755]: I0202 22:56:21.998999 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 02 22:56:22 crc kubenswrapper[4755]: I0202 22:56:22.036929 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 02 22:56:22 crc kubenswrapper[4755]: I0202 22:56:22.597779 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 02 22:56:22 crc kubenswrapper[4755]: I0202 22:56:22.912914 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="56d2a1d2-2eee-4d5f-847e-aa26963be2bd" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.224:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 02 22:56:22 crc kubenswrapper[4755]: I0202 22:56:22.913253 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="56d2a1d2-2eee-4d5f-847e-aa26963be2bd" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.224:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 02 22:56:23 crc kubenswrapper[4755]: I0202 22:56:23.389643 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 22:56:23 crc kubenswrapper[4755]: I0202 22:56:23.389952 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 22:56:26 crc kubenswrapper[4755]: I0202 22:56:26.764441 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 02 22:56:26 crc kubenswrapper[4755]: I0202 22:56:26.764800 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 02 22:56:26 crc kubenswrapper[4755]: I0202 22:56:26.772555 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 02 22:56:26 crc kubenswrapper[4755]: I0202 22:56:26.774818 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.522289 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.542407 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3bf32346-b983-4ad6-8e31-d4845e843f41-config-data\") pod \"3bf32346-b983-4ad6-8e31-d4845e843f41\" (UID: \"3bf32346-b983-4ad6-8e31-d4845e843f41\") " Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.542646 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77cr8\" (UniqueName: \"kubernetes.io/projected/3bf32346-b983-4ad6-8e31-d4845e843f41-kube-api-access-77cr8\") pod \"3bf32346-b983-4ad6-8e31-d4845e843f41\" (UID: \"3bf32346-b983-4ad6-8e31-d4845e843f41\") " Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.542702 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bf32346-b983-4ad6-8e31-d4845e843f41-combined-ca-bundle\") pod \"3bf32346-b983-4ad6-8e31-d4845e843f41\" (UID: \"3bf32346-b983-4ad6-8e31-d4845e843f41\") " Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.554951 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3bf32346-b983-4ad6-8e31-d4845e843f41-kube-api-access-77cr8" (OuterVolumeSpecName: "kube-api-access-77cr8") pod "3bf32346-b983-4ad6-8e31-d4845e843f41" (UID: "3bf32346-b983-4ad6-8e31-d4845e843f41"). InnerVolumeSpecName "kube-api-access-77cr8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.631881 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bf32346-b983-4ad6-8e31-d4845e843f41-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3bf32346-b983-4ad6-8e31-d4845e843f41" (UID: "3bf32346-b983-4ad6-8e31-d4845e843f41"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.641915 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bf32346-b983-4ad6-8e31-d4845e843f41-config-data" (OuterVolumeSpecName: "config-data") pod "3bf32346-b983-4ad6-8e31-d4845e843f41" (UID: "3bf32346-b983-4ad6-8e31-d4845e843f41"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.647516 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77cr8\" (UniqueName: \"kubernetes.io/projected/3bf32346-b983-4ad6-8e31-d4845e843f41-kube-api-access-77cr8\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.647589 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bf32346-b983-4ad6-8e31-d4845e843f41-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.647603 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3bf32346-b983-4ad6-8e31-d4845e843f41-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.652622 4755 generic.go:334] "Generic (PLEG): container finished" podID="3bf32346-b983-4ad6-8e31-d4845e843f41" containerID="4debd88f138e5c3f5235e63e6198f01d3d08ecbd485cd3886b7ef5bd5e62a3c4" exitCode=137 Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.652667 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3bf32346-b983-4ad6-8e31-d4845e843f41","Type":"ContainerDied","Data":"4debd88f138e5c3f5235e63e6198f01d3d08ecbd485cd3886b7ef5bd5e62a3c4"} Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.652700 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3bf32346-b983-4ad6-8e31-d4845e843f41","Type":"ContainerDied","Data":"843a5ddc6198c6225f7173e7b1d424579ae6d4498956ce46c51834b4027c1fc0"} Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.652719 4755 scope.go:117] "RemoveContainer" containerID="4debd88f138e5c3f5235e63e6198f01d3d08ecbd485cd3886b7ef5bd5e62a3c4" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.652911 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.702711 4755 scope.go:117] "RemoveContainer" containerID="4debd88f138e5c3f5235e63e6198f01d3d08ecbd485cd3886b7ef5bd5e62a3c4" Feb 02 22:56:29 crc kubenswrapper[4755]: E0202 22:56:29.705440 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4debd88f138e5c3f5235e63e6198f01d3d08ecbd485cd3886b7ef5bd5e62a3c4\": container with ID starting with 4debd88f138e5c3f5235e63e6198f01d3d08ecbd485cd3886b7ef5bd5e62a3c4 not found: ID does not exist" containerID="4debd88f138e5c3f5235e63e6198f01d3d08ecbd485cd3886b7ef5bd5e62a3c4" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.705481 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4debd88f138e5c3f5235e63e6198f01d3d08ecbd485cd3886b7ef5bd5e62a3c4"} err="failed to get container status \"4debd88f138e5c3f5235e63e6198f01d3d08ecbd485cd3886b7ef5bd5e62a3c4\": rpc error: code = NotFound desc = could not find container \"4debd88f138e5c3f5235e63e6198f01d3d08ecbd485cd3886b7ef5bd5e62a3c4\": container with ID starting with 4debd88f138e5c3f5235e63e6198f01d3d08ecbd485cd3886b7ef5bd5e62a3c4 not found: ID does not exist" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.708775 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.717249 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.737838 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 22:56:29 crc kubenswrapper[4755]: E0202 22:56:29.738267 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bf32346-b983-4ad6-8e31-d4845e843f41" containerName="nova-cell1-novncproxy-novncproxy" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.738285 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bf32346-b983-4ad6-8e31-d4845e843f41" containerName="nova-cell1-novncproxy-novncproxy" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.738488 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bf32346-b983-4ad6-8e31-d4845e843f41" containerName="nova-cell1-novncproxy-novncproxy" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.740140 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.769182 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4ed5cb5-496b-4d30-9134-6fe50ebe4759-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c4ed5cb5-496b-4d30-9134-6fe50ebe4759\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.769247 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzxfj\" (UniqueName: \"kubernetes.io/projected/c4ed5cb5-496b-4d30-9134-6fe50ebe4759-kube-api-access-mzxfj\") pod \"nova-cell1-novncproxy-0\" (UID: \"c4ed5cb5-496b-4d30-9134-6fe50ebe4759\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.769586 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4ed5cb5-496b-4d30-9134-6fe50ebe4759-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c4ed5cb5-496b-4d30-9134-6fe50ebe4759\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.769662 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4ed5cb5-496b-4d30-9134-6fe50ebe4759-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"c4ed5cb5-496b-4d30-9134-6fe50ebe4759\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.769748 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4ed5cb5-496b-4d30-9134-6fe50ebe4759-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"c4ed5cb5-496b-4d30-9134-6fe50ebe4759\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.779397 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.779530 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.779759 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.783482 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.871629 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4ed5cb5-496b-4d30-9134-6fe50ebe4759-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c4ed5cb5-496b-4d30-9134-6fe50ebe4759\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.872269 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4ed5cb5-496b-4d30-9134-6fe50ebe4759-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"c4ed5cb5-496b-4d30-9134-6fe50ebe4759\") " 
pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.872488 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4ed5cb5-496b-4d30-9134-6fe50ebe4759-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"c4ed5cb5-496b-4d30-9134-6fe50ebe4759\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.872588 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4ed5cb5-496b-4d30-9134-6fe50ebe4759-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c4ed5cb5-496b-4d30-9134-6fe50ebe4759\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.872668 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzxfj\" (UniqueName: \"kubernetes.io/projected/c4ed5cb5-496b-4d30-9134-6fe50ebe4759-kube-api-access-mzxfj\") pod \"nova-cell1-novncproxy-0\" (UID: \"c4ed5cb5-496b-4d30-9134-6fe50ebe4759\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.874603 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4ed5cb5-496b-4d30-9134-6fe50ebe4759-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c4ed5cb5-496b-4d30-9134-6fe50ebe4759\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.875704 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4ed5cb5-496b-4d30-9134-6fe50ebe4759-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"c4ed5cb5-496b-4d30-9134-6fe50ebe4759\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.876030 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4ed5cb5-496b-4d30-9134-6fe50ebe4759-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c4ed5cb5-496b-4d30-9134-6fe50ebe4759\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.876340 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4ed5cb5-496b-4d30-9134-6fe50ebe4759-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"c4ed5cb5-496b-4d30-9134-6fe50ebe4759\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:29 crc kubenswrapper[4755]: I0202 22:56:29.893561 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzxfj\" (UniqueName: \"kubernetes.io/projected/c4ed5cb5-496b-4d30-9134-6fe50ebe4759-kube-api-access-mzxfj\") pod \"nova-cell1-novncproxy-0\" (UID: \"c4ed5cb5-496b-4d30-9134-6fe50ebe4759\") " pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:30 crc kubenswrapper[4755]: I0202 22:56:30.055669 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:30 crc kubenswrapper[4755]: I0202 22:56:30.575755 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 02 22:56:30 crc kubenswrapper[4755]: I0202 22:56:30.669673 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"c4ed5cb5-496b-4d30-9134-6fe50ebe4759","Type":"ContainerStarted","Data":"98ff1075248022bf4c1f2b2207a00e2a933bbf326b338cc4753abb2ad778fa02"} Feb 02 22:56:31 crc kubenswrapper[4755]: I0202 22:56:31.084184 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3bf32346-b983-4ad6-8e31-d4845e843f41" path="/var/lib/kubelet/pods/3bf32346-b983-4ad6-8e31-d4845e843f41/volumes" Feb 02 22:56:31 crc kubenswrapper[4755]: I0202 22:56:31.693675 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"c4ed5cb5-496b-4d30-9134-6fe50ebe4759","Type":"ContainerStarted","Data":"9e6afe876d5ce5540a1f692e3e0aa43ef48287c948bca916ebc434d38dcfff9c"} Feb 02 22:56:31 crc kubenswrapper[4755]: I0202 22:56:31.731904 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.731884269 podStartE2EDuration="2.731884269s" podCreationTimestamp="2026-02-02 22:56:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:56:31.716919319 +0000 UTC m=+1347.408139735" watchObservedRunningTime="2026-02-02 22:56:31.731884269 +0000 UTC m=+1347.423104595" Feb 02 22:56:31 crc kubenswrapper[4755]: I0202 22:56:31.832093 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 02 22:56:31 crc kubenswrapper[4755]: I0202 22:56:31.832860 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 02 22:56:31 crc kubenswrapper[4755]: I0202 22:56:31.832962 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 02 22:56:31 crc kubenswrapper[4755]: I0202 22:56:31.835945 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 02 22:56:32 crc kubenswrapper[4755]: I0202 22:56:32.704887 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 02 22:56:32 crc kubenswrapper[4755]: I0202 22:56:32.708538 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 02 22:56:32 crc kubenswrapper[4755]: I0202 22:56:32.941601 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-54dd998c-smhc4"] Feb 02 22:56:32 crc kubenswrapper[4755]: I0202 22:56:32.944876 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:56:32 crc kubenswrapper[4755]: I0202 22:56:32.960813 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-54dd998c-smhc4"] Feb 02 22:56:33 crc kubenswrapper[4755]: I0202 22:56:33.048399 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-dns-swift-storage-0\") pod \"dnsmasq-dns-54dd998c-smhc4\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:56:33 crc kubenswrapper[4755]: I0202 22:56:33.048448 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bb4jx\" (UniqueName: \"kubernetes.io/projected/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-kube-api-access-bb4jx\") pod \"dnsmasq-dns-54dd998c-smhc4\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:56:33 crc kubenswrapper[4755]: I0202 22:56:33.048490 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-ovsdbserver-sb\") pod \"dnsmasq-dns-54dd998c-smhc4\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:56:33 crc kubenswrapper[4755]: I0202 22:56:33.048736 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-ovsdbserver-nb\") pod \"dnsmasq-dns-54dd998c-smhc4\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:56:33 crc kubenswrapper[4755]: I0202 22:56:33.048983 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-config\") pod \"dnsmasq-dns-54dd998c-smhc4\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:56:33 crc kubenswrapper[4755]: I0202 22:56:33.049042 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-dns-svc\") pod \"dnsmasq-dns-54dd998c-smhc4\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:56:33 crc kubenswrapper[4755]: I0202 22:56:33.150935 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-config\") pod \"dnsmasq-dns-54dd998c-smhc4\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:56:33 crc kubenswrapper[4755]: I0202 22:56:33.150993 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-dns-svc\") pod \"dnsmasq-dns-54dd998c-smhc4\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:56:33 crc kubenswrapper[4755]: I0202 22:56:33.151102 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-dns-swift-storage-0\") pod \"dnsmasq-dns-54dd998c-smhc4\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:56:33 crc kubenswrapper[4755]: I0202 22:56:33.151127 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bb4jx\" (UniqueName: \"kubernetes.io/projected/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-kube-api-access-bb4jx\") pod \"dnsmasq-dns-54dd998c-smhc4\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:56:33 crc kubenswrapper[4755]: I0202 22:56:33.151162 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-ovsdbserver-sb\") pod \"dnsmasq-dns-54dd998c-smhc4\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:56:33 crc kubenswrapper[4755]: I0202 22:56:33.151247 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-ovsdbserver-nb\") pod \"dnsmasq-dns-54dd998c-smhc4\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:56:33 crc kubenswrapper[4755]: I0202 22:56:33.152176 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-ovsdbserver-nb\") pod \"dnsmasq-dns-54dd998c-smhc4\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:56:33 crc kubenswrapper[4755]: I0202 22:56:33.152775 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-dns-svc\") pod \"dnsmasq-dns-54dd998c-smhc4\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:56:33 crc kubenswrapper[4755]: I0202 22:56:33.153124 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-config\") pod \"dnsmasq-dns-54dd998c-smhc4\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:56:33 crc kubenswrapper[4755]: I0202 22:56:33.154899 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-dns-swift-storage-0\") pod \"dnsmasq-dns-54dd998c-smhc4\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:56:33 crc kubenswrapper[4755]: I0202 22:56:33.154938 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-ovsdbserver-sb\") pod \"dnsmasq-dns-54dd998c-smhc4\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:56:33 crc kubenswrapper[4755]: I0202 22:56:33.199808 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bb4jx\" (UniqueName: \"kubernetes.io/projected/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-kube-api-access-bb4jx\") pod 
\"dnsmasq-dns-54dd998c-smhc4\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:56:33 crc kubenswrapper[4755]: I0202 22:56:33.296237 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:56:33 crc kubenswrapper[4755]: I0202 22:56:33.856232 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-54dd998c-smhc4"] Feb 02 22:56:34 crc kubenswrapper[4755]: I0202 22:56:34.734620 4755 generic.go:334] "Generic (PLEG): container finished" podID="a88f43d2-9cf6-43a0-b2bd-f945f279eea2" containerID="d9f916b5236f92a13e20fb68cb4274cb4be5f7ad83a015f555cb3d24c7feaa24" exitCode=0 Feb 02 22:56:34 crc kubenswrapper[4755]: I0202 22:56:34.734696 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54dd998c-smhc4" event={"ID":"a88f43d2-9cf6-43a0-b2bd-f945f279eea2","Type":"ContainerDied","Data":"d9f916b5236f92a13e20fb68cb4274cb4be5f7ad83a015f555cb3d24c7feaa24"} Feb 02 22:56:34 crc kubenswrapper[4755]: I0202 22:56:34.735584 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54dd998c-smhc4" event={"ID":"a88f43d2-9cf6-43a0-b2bd-f945f279eea2","Type":"ContainerStarted","Data":"889dac17318b027c4c3bedd8aae2eea61c50ead1276d2c7a3c1ed336520cd3ac"} Feb 02 22:56:35 crc kubenswrapper[4755]: I0202 22:56:35.056709 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:35 crc kubenswrapper[4755]: I0202 22:56:35.106857 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:56:35 crc kubenswrapper[4755]: I0202 22:56:35.107130 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" containerName="ceilometer-central-agent" containerID="cri-o://f39615e46f20fc6633e2402dcf69bc08d22d06a7453c46d6b9ee8d2ca646bd07" gracePeriod=30 Feb 02 22:56:35 crc kubenswrapper[4755]: I0202 22:56:35.107251 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" containerName="proxy-httpd" containerID="cri-o://176df803f8c9856ab5ce939593e39185b1c449d4ef9f2e4a9dfb154da96ec2f1" gracePeriod=30 Feb 02 22:56:35 crc kubenswrapper[4755]: I0202 22:56:35.107293 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" containerName="sg-core" containerID="cri-o://04e4b70a4cb343808f6028756135ededdbaf9e856891761c656bba7ecc6cde34" gracePeriod=30 Feb 02 22:56:35 crc kubenswrapper[4755]: I0202 22:56:35.107324 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" containerName="ceilometer-notification-agent" containerID="cri-o://97083cc24a5705bb4ccb5ec780218093bcdb2817ed690bc6daa48c2f28383140" gracePeriod=30 Feb 02 22:56:35 crc kubenswrapper[4755]: I0202 22:56:35.210510 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.227:3000/\": read tcp 10.217.0.2:40404->10.217.0.227:3000: read: connection reset by peer" Feb 02 22:56:35 crc kubenswrapper[4755]: I0202 22:56:35.721775 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/nova-api-0"] Feb 02 22:56:35 crc kubenswrapper[4755]: I0202 22:56:35.851478 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54dd998c-smhc4" event={"ID":"a88f43d2-9cf6-43a0-b2bd-f945f279eea2","Type":"ContainerStarted","Data":"bc1769ffb1fa778065eeeb65d0ed333f7523f110c8bb1c9f6883892f2461425d"} Feb 02 22:56:35 crc kubenswrapper[4755]: I0202 22:56:35.851920 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:56:35 crc kubenswrapper[4755]: I0202 22:56:35.869865 4755 generic.go:334] "Generic (PLEG): container finished" podID="f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" containerID="176df803f8c9856ab5ce939593e39185b1c449d4ef9f2e4a9dfb154da96ec2f1" exitCode=0 Feb 02 22:56:35 crc kubenswrapper[4755]: I0202 22:56:35.873911 4755 generic.go:334] "Generic (PLEG): container finished" podID="f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" containerID="04e4b70a4cb343808f6028756135ededdbaf9e856891761c656bba7ecc6cde34" exitCode=2 Feb 02 22:56:35 crc kubenswrapper[4755]: I0202 22:56:35.873949 4755 generic.go:334] "Generic (PLEG): container finished" podID="f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" containerID="f39615e46f20fc6633e2402dcf69bc08d22d06a7453c46d6b9ee8d2ca646bd07" exitCode=0 Feb 02 22:56:35 crc kubenswrapper[4755]: I0202 22:56:35.869995 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f","Type":"ContainerDied","Data":"176df803f8c9856ab5ce939593e39185b1c449d4ef9f2e4a9dfb154da96ec2f1"} Feb 02 22:56:35 crc kubenswrapper[4755]: I0202 22:56:35.874007 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f","Type":"ContainerDied","Data":"04e4b70a4cb343808f6028756135ededdbaf9e856891761c656bba7ecc6cde34"} Feb 02 22:56:35 crc kubenswrapper[4755]: I0202 22:56:35.874032 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f","Type":"ContainerDied","Data":"f39615e46f20fc6633e2402dcf69bc08d22d06a7453c46d6b9ee8d2ca646bd07"} Feb 02 22:56:35 crc kubenswrapper[4755]: I0202 22:56:35.874129 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="56d2a1d2-2eee-4d5f-847e-aa26963be2bd" containerName="nova-api-log" containerID="cri-o://a0d1f3368d50cb125e775efe5caebd28c840209b328f3eef0acc55aab3f089c0" gracePeriod=30 Feb 02 22:56:35 crc kubenswrapper[4755]: I0202 22:56:35.874451 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="56d2a1d2-2eee-4d5f-847e-aa26963be2bd" containerName="nova-api-api" containerID="cri-o://51274842ee3f7f98a5b34c6f0e0b077387b07bc7b2c2a1f513544e65c0912339" gracePeriod=30 Feb 02 22:56:35 crc kubenswrapper[4755]: I0202 22:56:35.878574 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-54dd998c-smhc4" podStartSLOduration=3.878555989 podStartE2EDuration="3.878555989s" podCreationTimestamp="2026-02-02 22:56:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:56:35.870646047 +0000 UTC m=+1351.561866383" watchObservedRunningTime="2026-02-02 22:56:35.878555989 +0000 UTC m=+1351.569776315" Feb 02 22:56:36 crc kubenswrapper[4755]: I0202 22:56:36.885865 4755 generic.go:334] "Generic (PLEG): container finished" 
podID="56d2a1d2-2eee-4d5f-847e-aa26963be2bd" containerID="a0d1f3368d50cb125e775efe5caebd28c840209b328f3eef0acc55aab3f089c0" exitCode=143 Feb 02 22:56:36 crc kubenswrapper[4755]: I0202 22:56:36.885952 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"56d2a1d2-2eee-4d5f-847e-aa26963be2bd","Type":"ContainerDied","Data":"a0d1f3368d50cb125e775efe5caebd28c840209b328f3eef0acc55aab3f089c0"} Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.638531 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.771247 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-config-data\") pod \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.771673 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-combined-ca-bundle\") pod \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.771767 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-scripts\") pod \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.771798 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85szm\" (UniqueName: \"kubernetes.io/projected/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-kube-api-access-85szm\") pod \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.771819 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-log-httpd\") pod \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.771848 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-ceilometer-tls-certs\") pod \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.771926 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-run-httpd\") pod \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.771972 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-sg-core-conf-yaml\") pod \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\" (UID: \"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f\") " Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.772345 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/empty-dir/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" (UID: "f6e9ccb5-10a6-4102-aff4-a8ef99bd696f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.772376 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" (UID: "f6e9ccb5-10a6-4102-aff4-a8ef99bd696f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.772964 4755 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.772991 4755 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.778638 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-kube-api-access-85szm" (OuterVolumeSpecName: "kube-api-access-85szm") pod "f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" (UID: "f6e9ccb5-10a6-4102-aff4-a8ef99bd696f"). InnerVolumeSpecName "kube-api-access-85szm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.779881 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-scripts" (OuterVolumeSpecName: "scripts") pod "f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" (UID: "f6e9ccb5-10a6-4102-aff4-a8ef99bd696f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.824869 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" (UID: "f6e9ccb5-10a6-4102-aff4-a8ef99bd696f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.835948 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" (UID: "f6e9ccb5-10a6-4102-aff4-a8ef99bd696f"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.875183 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" (UID: "f6e9ccb5-10a6-4102-aff4-a8ef99bd696f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.875348 4755 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.875492 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.875523 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85szm\" (UniqueName: \"kubernetes.io/projected/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-kube-api-access-85szm\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.875548 4755 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.900305 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-config-data" (OuterVolumeSpecName: "config-data") pod "f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" (UID: "f6e9ccb5-10a6-4102-aff4-a8ef99bd696f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.909361 4755 generic.go:334] "Generic (PLEG): container finished" podID="f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" containerID="97083cc24a5705bb4ccb5ec780218093bcdb2817ed690bc6daa48c2f28383140" exitCode=0 Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.909408 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f","Type":"ContainerDied","Data":"97083cc24a5705bb4ccb5ec780218093bcdb2817ed690bc6daa48c2f28383140"} Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.909441 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e9ccb5-10a6-4102-aff4-a8ef99bd696f","Type":"ContainerDied","Data":"a40ef438ae98b3e66ebb78276c81509f38de896869ad423611659906b2fbefc2"} Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.909462 4755 scope.go:117] "RemoveContainer" containerID="176df803f8c9856ab5ce939593e39185b1c449d4ef9f2e4a9dfb154da96ec2f1" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.909608 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.940880 4755 scope.go:117] "RemoveContainer" containerID="04e4b70a4cb343808f6028756135ededdbaf9e856891761c656bba7ecc6cde34" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.950400 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.960788 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.974690 4755 scope.go:117] "RemoveContainer" containerID="97083cc24a5705bb4ccb5ec780218093bcdb2817ed690bc6daa48c2f28383140" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.981629 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:56:38 crc kubenswrapper[4755]: E0202 22:56:38.982144 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" containerName="ceilometer-notification-agent" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.982168 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" containerName="ceilometer-notification-agent" Feb 02 22:56:38 crc kubenswrapper[4755]: E0202 22:56:38.982185 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" containerName="sg-core" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.982195 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" containerName="sg-core" Feb 02 22:56:38 crc kubenswrapper[4755]: E0202 22:56:38.982219 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" containerName="proxy-httpd" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.982227 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" containerName="proxy-httpd" Feb 02 22:56:38 crc kubenswrapper[4755]: E0202 22:56:38.982251 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" containerName="ceilometer-central-agent" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.982259 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" containerName="ceilometer-central-agent" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.982510 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" containerName="ceilometer-central-agent" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.982541 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" containerName="ceilometer-notification-agent" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.982562 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" containerName="sg-core" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.982581 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" containerName="proxy-httpd" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.987832 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 
22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.987873 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:38 crc kubenswrapper[4755]: I0202 22:56:38.997106 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.001530 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.001994 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.002296 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.002880 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.006777 4755 scope.go:117] "RemoveContainer" containerID="f39615e46f20fc6633e2402dcf69bc08d22d06a7453c46d6b9ee8d2ca646bd07" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.033569 4755 scope.go:117] "RemoveContainer" containerID="176df803f8c9856ab5ce939593e39185b1c449d4ef9f2e4a9dfb154da96ec2f1" Feb 02 22:56:39 crc kubenswrapper[4755]: E0202 22:56:39.034439 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"176df803f8c9856ab5ce939593e39185b1c449d4ef9f2e4a9dfb154da96ec2f1\": container with ID starting with 176df803f8c9856ab5ce939593e39185b1c449d4ef9f2e4a9dfb154da96ec2f1 not found: ID does not exist" containerID="176df803f8c9856ab5ce939593e39185b1c449d4ef9f2e4a9dfb154da96ec2f1" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.034468 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"176df803f8c9856ab5ce939593e39185b1c449d4ef9f2e4a9dfb154da96ec2f1"} err="failed to get container status \"176df803f8c9856ab5ce939593e39185b1c449d4ef9f2e4a9dfb154da96ec2f1\": rpc error: code = NotFound desc = could not find container \"176df803f8c9856ab5ce939593e39185b1c449d4ef9f2e4a9dfb154da96ec2f1\": container with ID starting with 176df803f8c9856ab5ce939593e39185b1c449d4ef9f2e4a9dfb154da96ec2f1 not found: ID does not exist" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.034490 4755 scope.go:117] "RemoveContainer" containerID="04e4b70a4cb343808f6028756135ededdbaf9e856891761c656bba7ecc6cde34" Feb 02 22:56:39 crc kubenswrapper[4755]: E0202 22:56:39.034944 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04e4b70a4cb343808f6028756135ededdbaf9e856891761c656bba7ecc6cde34\": container with ID starting with 04e4b70a4cb343808f6028756135ededdbaf9e856891761c656bba7ecc6cde34 not found: ID does not exist" containerID="04e4b70a4cb343808f6028756135ededdbaf9e856891761c656bba7ecc6cde34" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.034997 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04e4b70a4cb343808f6028756135ededdbaf9e856891761c656bba7ecc6cde34"} err="failed to get container status \"04e4b70a4cb343808f6028756135ededdbaf9e856891761c656bba7ecc6cde34\": rpc error: code = NotFound desc = could not find container 
\"04e4b70a4cb343808f6028756135ededdbaf9e856891761c656bba7ecc6cde34\": container with ID starting with 04e4b70a4cb343808f6028756135ededdbaf9e856891761c656bba7ecc6cde34 not found: ID does not exist" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.035032 4755 scope.go:117] "RemoveContainer" containerID="97083cc24a5705bb4ccb5ec780218093bcdb2817ed690bc6daa48c2f28383140" Feb 02 22:56:39 crc kubenswrapper[4755]: E0202 22:56:39.038307 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97083cc24a5705bb4ccb5ec780218093bcdb2817ed690bc6daa48c2f28383140\": container with ID starting with 97083cc24a5705bb4ccb5ec780218093bcdb2817ed690bc6daa48c2f28383140 not found: ID does not exist" containerID="97083cc24a5705bb4ccb5ec780218093bcdb2817ed690bc6daa48c2f28383140" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.038361 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97083cc24a5705bb4ccb5ec780218093bcdb2817ed690bc6daa48c2f28383140"} err="failed to get container status \"97083cc24a5705bb4ccb5ec780218093bcdb2817ed690bc6daa48c2f28383140\": rpc error: code = NotFound desc = could not find container \"97083cc24a5705bb4ccb5ec780218093bcdb2817ed690bc6daa48c2f28383140\": container with ID starting with 97083cc24a5705bb4ccb5ec780218093bcdb2817ed690bc6daa48c2f28383140 not found: ID does not exist" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.038396 4755 scope.go:117] "RemoveContainer" containerID="f39615e46f20fc6633e2402dcf69bc08d22d06a7453c46d6b9ee8d2ca646bd07" Feb 02 22:56:39 crc kubenswrapper[4755]: E0202 22:56:39.038750 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f39615e46f20fc6633e2402dcf69bc08d22d06a7453c46d6b9ee8d2ca646bd07\": container with ID starting with f39615e46f20fc6633e2402dcf69bc08d22d06a7453c46d6b9ee8d2ca646bd07 not found: ID does not exist" containerID="f39615e46f20fc6633e2402dcf69bc08d22d06a7453c46d6b9ee8d2ca646bd07" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.038793 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f39615e46f20fc6633e2402dcf69bc08d22d06a7453c46d6b9ee8d2ca646bd07"} err="failed to get container status \"f39615e46f20fc6633e2402dcf69bc08d22d06a7453c46d6b9ee8d2ca646bd07\": rpc error: code = NotFound desc = could not find container \"f39615e46f20fc6633e2402dcf69bc08d22d06a7453c46d6b9ee8d2ca646bd07\": container with ID starting with f39615e46f20fc6633e2402dcf69bc08d22d06a7453c46d6b9ee8d2ca646bd07 not found: ID does not exist" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.083770 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6e9ccb5-10a6-4102-aff4-a8ef99bd696f" path="/var/lib/kubelet/pods/f6e9ccb5-10a6-4102-aff4-a8ef99bd696f/volumes" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.190550 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/af709f14-439f-4a0f-bf46-c23ae0483426-run-httpd\") pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.190611 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-config-data\") 
pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.191630 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.191763 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-scripts\") pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.192023 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcgb7\" (UniqueName: \"kubernetes.io/projected/af709f14-439f-4a0f-bf46-c23ae0483426-kube-api-access-qcgb7\") pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.195408 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.195479 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.195581 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/af709f14-439f-4a0f-bf46-c23ae0483426-log-httpd\") pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.297738 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-scripts\") pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.298026 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcgb7\" (UniqueName: \"kubernetes.io/projected/af709f14-439f-4a0f-bf46-c23ae0483426-kube-api-access-qcgb7\") pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.298061 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.298082 4755 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.298128 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/af709f14-439f-4a0f-bf46-c23ae0483426-log-httpd\") pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.298157 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/af709f14-439f-4a0f-bf46-c23ae0483426-run-httpd\") pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.298186 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-config-data\") pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.298258 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.299601 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/af709f14-439f-4a0f-bf46-c23ae0483426-log-httpd\") pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.301219 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/af709f14-439f-4a0f-bf46-c23ae0483426-run-httpd\") pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.306201 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.308370 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.310159 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-scripts\") pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.310449 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.314673 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-config-data\") pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.334716 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcgb7\" (UniqueName: \"kubernetes.io/projected/af709f14-439f-4a0f-bf46-c23ae0483426-kube-api-access-qcgb7\") pod \"ceilometer-0\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.619747 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.632923 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.718015 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-combined-ca-bundle\") pod \"56d2a1d2-2eee-4d5f-847e-aa26963be2bd\" (UID: \"56d2a1d2-2eee-4d5f-847e-aa26963be2bd\") " Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.718229 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5m79f\" (UniqueName: \"kubernetes.io/projected/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-kube-api-access-5m79f\") pod \"56d2a1d2-2eee-4d5f-847e-aa26963be2bd\" (UID: \"56d2a1d2-2eee-4d5f-847e-aa26963be2bd\") " Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.718314 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-logs\") pod \"56d2a1d2-2eee-4d5f-847e-aa26963be2bd\" (UID: \"56d2a1d2-2eee-4d5f-847e-aa26963be2bd\") " Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.718348 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-config-data\") pod \"56d2a1d2-2eee-4d5f-847e-aa26963be2bd\" (UID: \"56d2a1d2-2eee-4d5f-847e-aa26963be2bd\") " Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.719956 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-logs" (OuterVolumeSpecName: "logs") pod "56d2a1d2-2eee-4d5f-847e-aa26963be2bd" (UID: "56d2a1d2-2eee-4d5f-847e-aa26963be2bd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.724395 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-kube-api-access-5m79f" (OuterVolumeSpecName: "kube-api-access-5m79f") pod "56d2a1d2-2eee-4d5f-847e-aa26963be2bd" (UID: "56d2a1d2-2eee-4d5f-847e-aa26963be2bd"). InnerVolumeSpecName "kube-api-access-5m79f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.749229 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "56d2a1d2-2eee-4d5f-847e-aa26963be2bd" (UID: "56d2a1d2-2eee-4d5f-847e-aa26963be2bd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.757773 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-config-data" (OuterVolumeSpecName: "config-data") pod "56d2a1d2-2eee-4d5f-847e-aa26963be2bd" (UID: "56d2a1d2-2eee-4d5f-847e-aa26963be2bd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.820745 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.820783 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5m79f\" (UniqueName: \"kubernetes.io/projected/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-kube-api-access-5m79f\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.820801 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-logs\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.820812 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56d2a1d2-2eee-4d5f-847e-aa26963be2bd-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.940292 4755 generic.go:334] "Generic (PLEG): container finished" podID="56d2a1d2-2eee-4d5f-847e-aa26963be2bd" containerID="51274842ee3f7f98a5b34c6f0e0b077387b07bc7b2c2a1f513544e65c0912339" exitCode=0 Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.940468 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.940500 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"56d2a1d2-2eee-4d5f-847e-aa26963be2bd","Type":"ContainerDied","Data":"51274842ee3f7f98a5b34c6f0e0b077387b07bc7b2c2a1f513544e65c0912339"} Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.940595 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"56d2a1d2-2eee-4d5f-847e-aa26963be2bd","Type":"ContainerDied","Data":"986260f9f4086911c90bddff4e2c2b33f70e634d213c55122a69ea91530e6f8d"} Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.940623 4755 scope.go:117] "RemoveContainer" containerID="51274842ee3f7f98a5b34c6f0e0b077387b07bc7b2c2a1f513544e65c0912339" Feb 02 22:56:39 crc kubenswrapper[4755]: I0202 22:56:39.962686 4755 scope.go:117] "RemoveContainer" containerID="a0d1f3368d50cb125e775efe5caebd28c840209b328f3eef0acc55aab3f089c0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.009908 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.010155 4755 scope.go:117] "RemoveContainer" containerID="51274842ee3f7f98a5b34c6f0e0b077387b07bc7b2c2a1f513544e65c0912339" Feb 02 22:56:40 crc kubenswrapper[4755]: E0202 22:56:40.013765 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51274842ee3f7f98a5b34c6f0e0b077387b07bc7b2c2a1f513544e65c0912339\": container with ID starting with 51274842ee3f7f98a5b34c6f0e0b077387b07bc7b2c2a1f513544e65c0912339 not found: ID does not exist" containerID="51274842ee3f7f98a5b34c6f0e0b077387b07bc7b2c2a1f513544e65c0912339" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.013829 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51274842ee3f7f98a5b34c6f0e0b077387b07bc7b2c2a1f513544e65c0912339"} err="failed to get container status \"51274842ee3f7f98a5b34c6f0e0b077387b07bc7b2c2a1f513544e65c0912339\": rpc error: code = NotFound desc = could not find container \"51274842ee3f7f98a5b34c6f0e0b077387b07bc7b2c2a1f513544e65c0912339\": container with ID starting with 51274842ee3f7f98a5b34c6f0e0b077387b07bc7b2c2a1f513544e65c0912339 not found: ID does not exist" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.013862 4755 scope.go:117] "RemoveContainer" containerID="a0d1f3368d50cb125e775efe5caebd28c840209b328f3eef0acc55aab3f089c0" Feb 02 22:56:40 crc kubenswrapper[4755]: E0202 22:56:40.019084 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0d1f3368d50cb125e775efe5caebd28c840209b328f3eef0acc55aab3f089c0\": container with ID starting with a0d1f3368d50cb125e775efe5caebd28c840209b328f3eef0acc55aab3f089c0 not found: ID does not exist" containerID="a0d1f3368d50cb125e775efe5caebd28c840209b328f3eef0acc55aab3f089c0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.019144 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0d1f3368d50cb125e775efe5caebd28c840209b328f3eef0acc55aab3f089c0"} err="failed to get container status \"a0d1f3368d50cb125e775efe5caebd28c840209b328f3eef0acc55aab3f089c0\": rpc error: code = NotFound desc = could not find container \"a0d1f3368d50cb125e775efe5caebd28c840209b328f3eef0acc55aab3f089c0\": container with ID starting with 
a0d1f3368d50cb125e775efe5caebd28c840209b328f3eef0acc55aab3f089c0 not found: ID does not exist" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.023718 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.034782 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 02 22:56:40 crc kubenswrapper[4755]: E0202 22:56:40.035202 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56d2a1d2-2eee-4d5f-847e-aa26963be2bd" containerName="nova-api-log" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.035220 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="56d2a1d2-2eee-4d5f-847e-aa26963be2bd" containerName="nova-api-log" Feb 02 22:56:40 crc kubenswrapper[4755]: E0202 22:56:40.035242 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56d2a1d2-2eee-4d5f-847e-aa26963be2bd" containerName="nova-api-api" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.035248 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="56d2a1d2-2eee-4d5f-847e-aa26963be2bd" containerName="nova-api-api" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.035448 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="56d2a1d2-2eee-4d5f-847e-aa26963be2bd" containerName="nova-api-log" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.035671 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="56d2a1d2-2eee-4d5f-847e-aa26963be2bd" containerName="nova-api-api" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.037557 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.045130 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.045181 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.045406 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.046122 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.057212 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.075317 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.133988 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-public-tls-certs\") pod \"nova-api-0\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " pod="openstack/nova-api-0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.134576 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " pod="openstack/nova-api-0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.134741 4755 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7njr9\" (UniqueName: \"kubernetes.io/projected/3b23f3b7-a57f-4ae3-8760-45af18159a6d-kube-api-access-7njr9\") pod \"nova-api-0\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " pod="openstack/nova-api-0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.134873 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b23f3b7-a57f-4ae3-8760-45af18159a6d-logs\") pod \"nova-api-0\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " pod="openstack/nova-api-0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.134969 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-config-data\") pod \"nova-api-0\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " pod="openstack/nova-api-0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.135070 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " pod="openstack/nova-api-0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.192080 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.236934 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7njr9\" (UniqueName: \"kubernetes.io/projected/3b23f3b7-a57f-4ae3-8760-45af18159a6d-kube-api-access-7njr9\") pod \"nova-api-0\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " pod="openstack/nova-api-0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.237040 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b23f3b7-a57f-4ae3-8760-45af18159a6d-logs\") pod \"nova-api-0\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " pod="openstack/nova-api-0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.237669 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b23f3b7-a57f-4ae3-8760-45af18159a6d-logs\") pod \"nova-api-0\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " pod="openstack/nova-api-0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.237116 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-config-data\") pod \"nova-api-0\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " pod="openstack/nova-api-0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.237841 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " pod="openstack/nova-api-0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.237965 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-public-tls-certs\") pod 
\"nova-api-0\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " pod="openstack/nova-api-0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.238159 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " pod="openstack/nova-api-0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.242619 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " pod="openstack/nova-api-0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.242750 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-config-data\") pod \"nova-api-0\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " pod="openstack/nova-api-0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.245261 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-public-tls-certs\") pod \"nova-api-0\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " pod="openstack/nova-api-0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.251035 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " pod="openstack/nova-api-0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.254540 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7njr9\" (UniqueName: \"kubernetes.io/projected/3b23f3b7-a57f-4ae3-8760-45af18159a6d-kube-api-access-7njr9\") pod \"nova-api-0\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " pod="openstack/nova-api-0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.374025 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.889942 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.954854 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"af709f14-439f-4a0f-bf46-c23ae0483426","Type":"ContainerStarted","Data":"5a46ca6e96312583fb20fc9bd2c690b153fda0582b8717139ba355ea7ccdfac7"} Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.955786 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3b23f3b7-a57f-4ae3-8760-45af18159a6d","Type":"ContainerStarted","Data":"8281e82a29a560d6b46043e8a35f1692da232496091cb82b630fc8a0586f72f1"} Feb 02 22:56:40 crc kubenswrapper[4755]: I0202 22:56:40.977058 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Feb 02 22:56:41 crc kubenswrapper[4755]: I0202 22:56:41.097321 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56d2a1d2-2eee-4d5f-847e-aa26963be2bd" path="/var/lib/kubelet/pods/56d2a1d2-2eee-4d5f-847e-aa26963be2bd/volumes" Feb 02 22:56:41 crc kubenswrapper[4755]: I0202 22:56:41.315835 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-qbjlm"] Feb 02 22:56:41 crc kubenswrapper[4755]: I0202 22:56:41.317318 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-qbjlm" Feb 02 22:56:41 crc kubenswrapper[4755]: I0202 22:56:41.320875 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Feb 02 22:56:41 crc kubenswrapper[4755]: I0202 22:56:41.321592 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Feb 02 22:56:41 crc kubenswrapper[4755]: I0202 22:56:41.324752 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-qbjlm"] Feb 02 22:56:41 crc kubenswrapper[4755]: I0202 22:56:41.368274 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pv557\" (UniqueName: \"kubernetes.io/projected/35a5dd6b-95f6-43c8-b929-bd370640aa10-kube-api-access-pv557\") pod \"nova-cell1-cell-mapping-qbjlm\" (UID: \"35a5dd6b-95f6-43c8-b929-bd370640aa10\") " pod="openstack/nova-cell1-cell-mapping-qbjlm" Feb 02 22:56:41 crc kubenswrapper[4755]: I0202 22:56:41.368626 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35a5dd6b-95f6-43c8-b929-bd370640aa10-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-qbjlm\" (UID: \"35a5dd6b-95f6-43c8-b929-bd370640aa10\") " pod="openstack/nova-cell1-cell-mapping-qbjlm" Feb 02 22:56:41 crc kubenswrapper[4755]: I0202 22:56:41.368655 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35a5dd6b-95f6-43c8-b929-bd370640aa10-config-data\") pod \"nova-cell1-cell-mapping-qbjlm\" (UID: \"35a5dd6b-95f6-43c8-b929-bd370640aa10\") " pod="openstack/nova-cell1-cell-mapping-qbjlm" Feb 02 22:56:41 crc kubenswrapper[4755]: I0202 22:56:41.368681 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/35a5dd6b-95f6-43c8-b929-bd370640aa10-scripts\") pod \"nova-cell1-cell-mapping-qbjlm\" (UID: \"35a5dd6b-95f6-43c8-b929-bd370640aa10\") " pod="openstack/nova-cell1-cell-mapping-qbjlm" Feb 02 22:56:41 crc kubenswrapper[4755]: I0202 22:56:41.472402 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pv557\" (UniqueName: \"kubernetes.io/projected/35a5dd6b-95f6-43c8-b929-bd370640aa10-kube-api-access-pv557\") pod \"nova-cell1-cell-mapping-qbjlm\" (UID: \"35a5dd6b-95f6-43c8-b929-bd370640aa10\") " pod="openstack/nova-cell1-cell-mapping-qbjlm" Feb 02 22:56:41 crc kubenswrapper[4755]: I0202 22:56:41.472512 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35a5dd6b-95f6-43c8-b929-bd370640aa10-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-qbjlm\" (UID: \"35a5dd6b-95f6-43c8-b929-bd370640aa10\") " pod="openstack/nova-cell1-cell-mapping-qbjlm" Feb 02 22:56:41 crc kubenswrapper[4755]: I0202 22:56:41.474648 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35a5dd6b-95f6-43c8-b929-bd370640aa10-config-data\") pod \"nova-cell1-cell-mapping-qbjlm\" (UID: \"35a5dd6b-95f6-43c8-b929-bd370640aa10\") " pod="openstack/nova-cell1-cell-mapping-qbjlm" Feb 02 22:56:41 crc kubenswrapper[4755]: I0202 22:56:41.474755 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35a5dd6b-95f6-43c8-b929-bd370640aa10-scripts\") pod \"nova-cell1-cell-mapping-qbjlm\" (UID: \"35a5dd6b-95f6-43c8-b929-bd370640aa10\") " pod="openstack/nova-cell1-cell-mapping-qbjlm" Feb 02 22:56:41 crc kubenswrapper[4755]: I0202 22:56:41.479448 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35a5dd6b-95f6-43c8-b929-bd370640aa10-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-qbjlm\" (UID: \"35a5dd6b-95f6-43c8-b929-bd370640aa10\") " pod="openstack/nova-cell1-cell-mapping-qbjlm" Feb 02 22:56:41 crc kubenswrapper[4755]: I0202 22:56:41.480007 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35a5dd6b-95f6-43c8-b929-bd370640aa10-config-data\") pod \"nova-cell1-cell-mapping-qbjlm\" (UID: \"35a5dd6b-95f6-43c8-b929-bd370640aa10\") " pod="openstack/nova-cell1-cell-mapping-qbjlm" Feb 02 22:56:41 crc kubenswrapper[4755]: I0202 22:56:41.481609 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35a5dd6b-95f6-43c8-b929-bd370640aa10-scripts\") pod \"nova-cell1-cell-mapping-qbjlm\" (UID: \"35a5dd6b-95f6-43c8-b929-bd370640aa10\") " pod="openstack/nova-cell1-cell-mapping-qbjlm" Feb 02 22:56:41 crc kubenswrapper[4755]: I0202 22:56:41.518584 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pv557\" (UniqueName: \"kubernetes.io/projected/35a5dd6b-95f6-43c8-b929-bd370640aa10-kube-api-access-pv557\") pod \"nova-cell1-cell-mapping-qbjlm\" (UID: \"35a5dd6b-95f6-43c8-b929-bd370640aa10\") " pod="openstack/nova-cell1-cell-mapping-qbjlm" Feb 02 22:56:41 crc kubenswrapper[4755]: I0202 22:56:41.656417 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-qbjlm" Feb 02 22:56:41 crc kubenswrapper[4755]: I0202 22:56:41.979147 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"af709f14-439f-4a0f-bf46-c23ae0483426","Type":"ContainerStarted","Data":"351ebcf32aba0db6b5a9a1faf3ab935a1376b0172f3b72566677ef4542cdeefd"} Feb 02 22:56:41 crc kubenswrapper[4755]: I0202 22:56:41.979190 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"af709f14-439f-4a0f-bf46-c23ae0483426","Type":"ContainerStarted","Data":"2ab106f6d962009f9a6c1b02542975a4f2d108a6deaae2792f5a89bd950681d5"} Feb 02 22:56:41 crc kubenswrapper[4755]: I0202 22:56:41.982274 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3b23f3b7-a57f-4ae3-8760-45af18159a6d","Type":"ContainerStarted","Data":"66f6fb2a5cf53bd6c5e46675754b815aba62de1ec1c84eeeeb23d082406262da"} Feb 02 22:56:41 crc kubenswrapper[4755]: I0202 22:56:41.982295 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3b23f3b7-a57f-4ae3-8760-45af18159a6d","Type":"ContainerStarted","Data":"c1a163487e815594614b3b68ccbea9ca347295984cc23e068a013e6a5a22b797"} Feb 02 22:56:42 crc kubenswrapper[4755]: I0202 22:56:42.010476 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.010457832 podStartE2EDuration="3.010457832s" podCreationTimestamp="2026-02-02 22:56:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:56:41.998140156 +0000 UTC m=+1357.689360502" watchObservedRunningTime="2026-02-02 22:56:42.010457832 +0000 UTC m=+1357.701678158" Feb 02 22:56:42 crc kubenswrapper[4755]: I0202 22:56:42.203510 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-qbjlm"] Feb 02 22:56:42 crc kubenswrapper[4755]: I0202 22:56:42.993625 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"af709f14-439f-4a0f-bf46-c23ae0483426","Type":"ContainerStarted","Data":"f258a723b994f60acb6fc7606a22cc043433c1abfe1e641cd4f7a94a602cce56"} Feb 02 22:56:42 crc kubenswrapper[4755]: I0202 22:56:42.995837 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-qbjlm" event={"ID":"35a5dd6b-95f6-43c8-b929-bd370640aa10","Type":"ContainerStarted","Data":"ad5a820a59f4260604fdb19aaaf313ac4f4580866dc5fc6ca733d716e78c3e84"} Feb 02 22:56:42 crc kubenswrapper[4755]: I0202 22:56:42.995866 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-qbjlm" event={"ID":"35a5dd6b-95f6-43c8-b929-bd370640aa10","Type":"ContainerStarted","Data":"2ce4d700bdcbb0a82b4468ab76980d3de14fe945c5ad6e6ca1db9946a3a1040f"} Feb 02 22:56:43 crc kubenswrapper[4755]: I0202 22:56:43.023954 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-qbjlm" podStartSLOduration=2.023936821 podStartE2EDuration="2.023936821s" podCreationTimestamp="2026-02-02 22:56:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:56:43.015768291 +0000 UTC m=+1358.706988637" watchObservedRunningTime="2026-02-02 22:56:43.023936821 +0000 UTC m=+1358.715157147" Feb 02 22:56:43 crc kubenswrapper[4755]: I0202 22:56:43.298885 4755 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:56:43 crc kubenswrapper[4755]: I0202 22:56:43.369955 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-884c8b8f5-sqb6n"] Feb 02 22:56:43 crc kubenswrapper[4755]: I0202 22:56:43.370236 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n" podUID="e9d11d8c-3a46-46bd-b2cc-01f736ccb878" containerName="dnsmasq-dns" containerID="cri-o://0b869454a89ba404d700b4c05c55df7d9a04fc67a039c42968a379ddf6a42452" gracePeriod=10 Feb 02 22:56:43 crc kubenswrapper[4755]: I0202 22:56:43.963018 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n" Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.033174 4755 generic.go:334] "Generic (PLEG): container finished" podID="e9d11d8c-3a46-46bd-b2cc-01f736ccb878" containerID="0b869454a89ba404d700b4c05c55df7d9a04fc67a039c42968a379ddf6a42452" exitCode=0 Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.034170 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n" Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.038780 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n" event={"ID":"e9d11d8c-3a46-46bd-b2cc-01f736ccb878","Type":"ContainerDied","Data":"0b869454a89ba404d700b4c05c55df7d9a04fc67a039c42968a379ddf6a42452"} Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.038843 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n" event={"ID":"e9d11d8c-3a46-46bd-b2cc-01f736ccb878","Type":"ContainerDied","Data":"14080d75936791b8b492fac4170fbd35cbbfa86688bb9ddd2e3e30a62419ceba"} Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.038872 4755 scope.go:117] "RemoveContainer" containerID="0b869454a89ba404d700b4c05c55df7d9a04fc67a039c42968a379ddf6a42452" Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.062010 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qqwlw\" (UniqueName: \"kubernetes.io/projected/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-kube-api-access-qqwlw\") pod \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.062049 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-dns-svc\") pod \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.062072 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-dns-swift-storage-0\") pod \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.062152 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-ovsdbserver-nb\") pod \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 
22:56:44.062319 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-ovsdbserver-sb\") pod \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.062461 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-config\") pod \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\" (UID: \"e9d11d8c-3a46-46bd-b2cc-01f736ccb878\") " Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.068242 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-kube-api-access-qqwlw" (OuterVolumeSpecName: "kube-api-access-qqwlw") pod "e9d11d8c-3a46-46bd-b2cc-01f736ccb878" (UID: "e9d11d8c-3a46-46bd-b2cc-01f736ccb878"). InnerVolumeSpecName "kube-api-access-qqwlw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.068575 4755 scope.go:117] "RemoveContainer" containerID="b6aa22fbbf97efc511476b23f08b6ccde4658ce867827e0627a0f615eaa7452e" Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.129565 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e9d11d8c-3a46-46bd-b2cc-01f736ccb878" (UID: "e9d11d8c-3a46-46bd-b2cc-01f736ccb878"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.134245 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e9d11d8c-3a46-46bd-b2cc-01f736ccb878" (UID: "e9d11d8c-3a46-46bd-b2cc-01f736ccb878"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.137404 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e9d11d8c-3a46-46bd-b2cc-01f736ccb878" (UID: "e9d11d8c-3a46-46bd-b2cc-01f736ccb878"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.159131 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e9d11d8c-3a46-46bd-b2cc-01f736ccb878" (UID: "e9d11d8c-3a46-46bd-b2cc-01f736ccb878"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.166687 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qqwlw\" (UniqueName: \"kubernetes.io/projected/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-kube-api-access-qqwlw\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.166721 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.166929 4755 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.166942 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.166955 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.168240 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-config" (OuterVolumeSpecName: "config") pod "e9d11d8c-3a46-46bd-b2cc-01f736ccb878" (UID: "e9d11d8c-3a46-46bd-b2cc-01f736ccb878"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.206917 4755 scope.go:117] "RemoveContainer" containerID="0b869454a89ba404d700b4c05c55df7d9a04fc67a039c42968a379ddf6a42452" Feb 02 22:56:44 crc kubenswrapper[4755]: E0202 22:56:44.207320 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b869454a89ba404d700b4c05c55df7d9a04fc67a039c42968a379ddf6a42452\": container with ID starting with 0b869454a89ba404d700b4c05c55df7d9a04fc67a039c42968a379ddf6a42452 not found: ID does not exist" containerID="0b869454a89ba404d700b4c05c55df7d9a04fc67a039c42968a379ddf6a42452" Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.207351 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b869454a89ba404d700b4c05c55df7d9a04fc67a039c42968a379ddf6a42452"} err="failed to get container status \"0b869454a89ba404d700b4c05c55df7d9a04fc67a039c42968a379ddf6a42452\": rpc error: code = NotFound desc = could not find container \"0b869454a89ba404d700b4c05c55df7d9a04fc67a039c42968a379ddf6a42452\": container with ID starting with 0b869454a89ba404d700b4c05c55df7d9a04fc67a039c42968a379ddf6a42452 not found: ID does not exist" Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.207372 4755 scope.go:117] "RemoveContainer" containerID="b6aa22fbbf97efc511476b23f08b6ccde4658ce867827e0627a0f615eaa7452e" Feb 02 22:56:44 crc kubenswrapper[4755]: E0202 22:56:44.207648 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6aa22fbbf97efc511476b23f08b6ccde4658ce867827e0627a0f615eaa7452e\": container with ID starting with 
b6aa22fbbf97efc511476b23f08b6ccde4658ce867827e0627a0f615eaa7452e not found: ID does not exist" containerID="b6aa22fbbf97efc511476b23f08b6ccde4658ce867827e0627a0f615eaa7452e" Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.207695 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6aa22fbbf97efc511476b23f08b6ccde4658ce867827e0627a0f615eaa7452e"} err="failed to get container status \"b6aa22fbbf97efc511476b23f08b6ccde4658ce867827e0627a0f615eaa7452e\": rpc error: code = NotFound desc = could not find container \"b6aa22fbbf97efc511476b23f08b6ccde4658ce867827e0627a0f615eaa7452e\": container with ID starting with b6aa22fbbf97efc511476b23f08b6ccde4658ce867827e0627a0f615eaa7452e not found: ID does not exist" Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.268667 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9d11d8c-3a46-46bd-b2cc-01f736ccb878-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.382028 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-884c8b8f5-sqb6n"] Feb 02 22:56:44 crc kubenswrapper[4755]: I0202 22:56:44.394349 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-884c8b8f5-sqb6n"] Feb 02 22:56:45 crc kubenswrapper[4755]: I0202 22:56:45.081545 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9d11d8c-3a46-46bd-b2cc-01f736ccb878" path="/var/lib/kubelet/pods/e9d11d8c-3a46-46bd-b2cc-01f736ccb878/volumes" Feb 02 22:56:46 crc kubenswrapper[4755]: I0202 22:56:46.090636 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"af709f14-439f-4a0f-bf46-c23ae0483426","Type":"ContainerStarted","Data":"5664dc13cc5f441046a203ad41fd688672e983afdd2042b8043d046d9d9abf15"} Feb 02 22:56:46 crc kubenswrapper[4755]: I0202 22:56:46.094020 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 22:56:46 crc kubenswrapper[4755]: I0202 22:56:46.154481 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.435758276 podStartE2EDuration="8.154461108s" podCreationTimestamp="2026-02-02 22:56:38 +0000 UTC" firstStartedPulling="2026-02-02 22:56:40.195540538 +0000 UTC m=+1355.886760864" lastFinishedPulling="2026-02-02 22:56:44.91424337 +0000 UTC m=+1360.605463696" observedRunningTime="2026-02-02 22:56:46.132218174 +0000 UTC m=+1361.823438500" watchObservedRunningTime="2026-02-02 22:56:46.154461108 +0000 UTC m=+1361.845681434" Feb 02 22:56:48 crc kubenswrapper[4755]: I0202 22:56:48.124318 4755 generic.go:334] "Generic (PLEG): container finished" podID="35a5dd6b-95f6-43c8-b929-bd370640aa10" containerID="ad5a820a59f4260604fdb19aaaf313ac4f4580866dc5fc6ca733d716e78c3e84" exitCode=0 Feb 02 22:56:48 crc kubenswrapper[4755]: I0202 22:56:48.124455 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-qbjlm" event={"ID":"35a5dd6b-95f6-43c8-b929-bd370640aa10","Type":"ContainerDied","Data":"ad5a820a59f4260604fdb19aaaf313ac4f4580866dc5fc6ca733d716e78c3e84"} Feb 02 22:56:48 crc kubenswrapper[4755]: I0202 22:56:48.900691 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-884c8b8f5-sqb6n" podUID="e9d11d8c-3a46-46bd-b2cc-01f736ccb878" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.219:5353: i/o timeout" Feb 02 
22:56:49 crc kubenswrapper[4755]: I0202 22:56:49.702818 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-qbjlm" Feb 02 22:56:49 crc kubenswrapper[4755]: I0202 22:56:49.809345 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35a5dd6b-95f6-43c8-b929-bd370640aa10-config-data\") pod \"35a5dd6b-95f6-43c8-b929-bd370640aa10\" (UID: \"35a5dd6b-95f6-43c8-b929-bd370640aa10\") " Feb 02 22:56:49 crc kubenswrapper[4755]: I0202 22:56:49.809788 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35a5dd6b-95f6-43c8-b929-bd370640aa10-combined-ca-bundle\") pod \"35a5dd6b-95f6-43c8-b929-bd370640aa10\" (UID: \"35a5dd6b-95f6-43c8-b929-bd370640aa10\") " Feb 02 22:56:49 crc kubenswrapper[4755]: I0202 22:56:49.809952 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pv557\" (UniqueName: \"kubernetes.io/projected/35a5dd6b-95f6-43c8-b929-bd370640aa10-kube-api-access-pv557\") pod \"35a5dd6b-95f6-43c8-b929-bd370640aa10\" (UID: \"35a5dd6b-95f6-43c8-b929-bd370640aa10\") " Feb 02 22:56:49 crc kubenswrapper[4755]: I0202 22:56:49.810117 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35a5dd6b-95f6-43c8-b929-bd370640aa10-scripts\") pod \"35a5dd6b-95f6-43c8-b929-bd370640aa10\" (UID: \"35a5dd6b-95f6-43c8-b929-bd370640aa10\") " Feb 02 22:56:49 crc kubenswrapper[4755]: I0202 22:56:49.816682 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35a5dd6b-95f6-43c8-b929-bd370640aa10-kube-api-access-pv557" (OuterVolumeSpecName: "kube-api-access-pv557") pod "35a5dd6b-95f6-43c8-b929-bd370640aa10" (UID: "35a5dd6b-95f6-43c8-b929-bd370640aa10"). InnerVolumeSpecName "kube-api-access-pv557". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:56:49 crc kubenswrapper[4755]: I0202 22:56:49.818221 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35a5dd6b-95f6-43c8-b929-bd370640aa10-scripts" (OuterVolumeSpecName: "scripts") pod "35a5dd6b-95f6-43c8-b929-bd370640aa10" (UID: "35a5dd6b-95f6-43c8-b929-bd370640aa10"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:49 crc kubenswrapper[4755]: I0202 22:56:49.853886 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35a5dd6b-95f6-43c8-b929-bd370640aa10-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "35a5dd6b-95f6-43c8-b929-bd370640aa10" (UID: "35a5dd6b-95f6-43c8-b929-bd370640aa10"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:49 crc kubenswrapper[4755]: I0202 22:56:49.875776 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35a5dd6b-95f6-43c8-b929-bd370640aa10-config-data" (OuterVolumeSpecName: "config-data") pod "35a5dd6b-95f6-43c8-b929-bd370640aa10" (UID: "35a5dd6b-95f6-43c8-b929-bd370640aa10"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:49 crc kubenswrapper[4755]: I0202 22:56:49.914350 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pv557\" (UniqueName: \"kubernetes.io/projected/35a5dd6b-95f6-43c8-b929-bd370640aa10-kube-api-access-pv557\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:49 crc kubenswrapper[4755]: I0202 22:56:49.914382 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35a5dd6b-95f6-43c8-b929-bd370640aa10-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:49 crc kubenswrapper[4755]: I0202 22:56:49.914392 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35a5dd6b-95f6-43c8-b929-bd370640aa10-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:49 crc kubenswrapper[4755]: I0202 22:56:49.914402 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35a5dd6b-95f6-43c8-b929-bd370640aa10-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:50 crc kubenswrapper[4755]: I0202 22:56:50.149658 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-qbjlm" event={"ID":"35a5dd6b-95f6-43c8-b929-bd370640aa10","Type":"ContainerDied","Data":"2ce4d700bdcbb0a82b4468ab76980d3de14fe945c5ad6e6ca1db9946a3a1040f"} Feb 02 22:56:50 crc kubenswrapper[4755]: I0202 22:56:50.149705 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2ce4d700bdcbb0a82b4468ab76980d3de14fe945c5ad6e6ca1db9946a3a1040f" Feb 02 22:56:50 crc kubenswrapper[4755]: I0202 22:56:50.149802 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-qbjlm" Feb 02 22:56:50 crc kubenswrapper[4755]: I0202 22:56:50.372578 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 22:56:50 crc kubenswrapper[4755]: I0202 22:56:50.372884 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd" containerName="nova-scheduler-scheduler" containerID="cri-o://bff936ba8deed2e76ade4a1b7dba41ea5183a41d1aaf86076b3e869db72fc573" gracePeriod=30 Feb 02 22:56:50 crc kubenswrapper[4755]: I0202 22:56:50.377409 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 22:56:50 crc kubenswrapper[4755]: I0202 22:56:50.377453 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 22:56:50 crc kubenswrapper[4755]: I0202 22:56:50.382076 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 02 22:56:50 crc kubenswrapper[4755]: I0202 22:56:50.470755 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 22:56:50 crc kubenswrapper[4755]: I0202 22:56:50.471039 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="314d7c05-3b1b-4fe8-93f0-32ab7c761fe6" containerName="nova-metadata-log" containerID="cri-o://68c9dce7fd52a56c8b74a82b43ffca965bfe2e63bed9fb7e4fd1976b68e3a2ff" gracePeriod=30 Feb 02 22:56:50 crc kubenswrapper[4755]: I0202 22:56:50.471137 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="314d7c05-3b1b-4fe8-93f0-32ab7c761fe6" 
containerName="nova-metadata-metadata" containerID="cri-o://7316a42b2c095cd11c736b2760f5f4866edbf63c1a5df1fc7145e44ca64bd1a6" gracePeriod=30 Feb 02 22:56:51 crc kubenswrapper[4755]: I0202 22:56:51.160602 4755 generic.go:334] "Generic (PLEG): container finished" podID="314d7c05-3b1b-4fe8-93f0-32ab7c761fe6" containerID="68c9dce7fd52a56c8b74a82b43ffca965bfe2e63bed9fb7e4fd1976b68e3a2ff" exitCode=143 Feb 02 22:56:51 crc kubenswrapper[4755]: I0202 22:56:51.160829 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6","Type":"ContainerDied","Data":"68c9dce7fd52a56c8b74a82b43ffca965bfe2e63bed9fb7e4fd1976b68e3a2ff"} Feb 02 22:56:51 crc kubenswrapper[4755]: I0202 22:56:51.162427 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3b23f3b7-a57f-4ae3-8760-45af18159a6d" containerName="nova-api-api" containerID="cri-o://66f6fb2a5cf53bd6c5e46675754b815aba62de1ec1c84eeeeb23d082406262da" gracePeriod=30 Feb 02 22:56:51 crc kubenswrapper[4755]: I0202 22:56:51.162327 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3b23f3b7-a57f-4ae3-8760-45af18159a6d" containerName="nova-api-log" containerID="cri-o://c1a163487e815594614b3b68ccbea9ca347295984cc23e068a013e6a5a22b797" gracePeriod=30 Feb 02 22:56:51 crc kubenswrapper[4755]: I0202 22:56:51.166187 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3b23f3b7-a57f-4ae3-8760-45af18159a6d" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.231:8774/\": EOF" Feb 02 22:56:51 crc kubenswrapper[4755]: I0202 22:56:51.166638 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3b23f3b7-a57f-4ae3-8760-45af18159a6d" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.231:8774/\": EOF" Feb 02 22:56:51 crc kubenswrapper[4755]: I0202 22:56:51.625896 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 22:56:51 crc kubenswrapper[4755]: I0202 22:56:51.763249 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q9629\" (UniqueName: \"kubernetes.io/projected/2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd-kube-api-access-q9629\") pod \"2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd\" (UID: \"2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd\") " Feb 02 22:56:51 crc kubenswrapper[4755]: I0202 22:56:51.763407 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd-combined-ca-bundle\") pod \"2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd\" (UID: \"2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd\") " Feb 02 22:56:51 crc kubenswrapper[4755]: I0202 22:56:51.763491 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd-config-data\") pod \"2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd\" (UID: \"2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd\") " Feb 02 22:56:51 crc kubenswrapper[4755]: I0202 22:56:51.771895 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd-kube-api-access-q9629" (OuterVolumeSpecName: "kube-api-access-q9629") pod "2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd" (UID: "2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd"). InnerVolumeSpecName "kube-api-access-q9629". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:56:51 crc kubenswrapper[4755]: I0202 22:56:51.835176 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd-config-data" (OuterVolumeSpecName: "config-data") pod "2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd" (UID: "2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:51 crc kubenswrapper[4755]: I0202 22:56:51.860895 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd" (UID: "2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:51 crc kubenswrapper[4755]: I0202 22:56:51.866250 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:51 crc kubenswrapper[4755]: I0202 22:56:51.866291 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q9629\" (UniqueName: \"kubernetes.io/projected/2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd-kube-api-access-q9629\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:51 crc kubenswrapper[4755]: I0202 22:56:51.866307 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.175391 4755 generic.go:334] "Generic (PLEG): container finished" podID="3b23f3b7-a57f-4ae3-8760-45af18159a6d" containerID="c1a163487e815594614b3b68ccbea9ca347295984cc23e068a013e6a5a22b797" exitCode=143 Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.175490 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3b23f3b7-a57f-4ae3-8760-45af18159a6d","Type":"ContainerDied","Data":"c1a163487e815594614b3b68ccbea9ca347295984cc23e068a013e6a5a22b797"} Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.177979 4755 generic.go:334] "Generic (PLEG): container finished" podID="2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd" containerID="bff936ba8deed2e76ade4a1b7dba41ea5183a41d1aaf86076b3e869db72fc573" exitCode=0 Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.178016 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd","Type":"ContainerDied","Data":"bff936ba8deed2e76ade4a1b7dba41ea5183a41d1aaf86076b3e869db72fc573"} Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.178055 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd","Type":"ContainerDied","Data":"0c991f1a6017cd2f8baeabdb0056e3c5d20defe757ed99b5124b7aa87ded44f3"} Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.178078 4755 scope.go:117] "RemoveContainer" containerID="bff936ba8deed2e76ade4a1b7dba41ea5183a41d1aaf86076b3e869db72fc573" Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.178074 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.205747 4755 scope.go:117] "RemoveContainer" containerID="bff936ba8deed2e76ade4a1b7dba41ea5183a41d1aaf86076b3e869db72fc573" Feb 02 22:56:52 crc kubenswrapper[4755]: E0202 22:56:52.206212 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bff936ba8deed2e76ade4a1b7dba41ea5183a41d1aaf86076b3e869db72fc573\": container with ID starting with bff936ba8deed2e76ade4a1b7dba41ea5183a41d1aaf86076b3e869db72fc573 not found: ID does not exist" containerID="bff936ba8deed2e76ade4a1b7dba41ea5183a41d1aaf86076b3e869db72fc573" Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.206269 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bff936ba8deed2e76ade4a1b7dba41ea5183a41d1aaf86076b3e869db72fc573"} err="failed to get container status \"bff936ba8deed2e76ade4a1b7dba41ea5183a41d1aaf86076b3e869db72fc573\": rpc error: code = NotFound desc = could not find container \"bff936ba8deed2e76ade4a1b7dba41ea5183a41d1aaf86076b3e869db72fc573\": container with ID starting with bff936ba8deed2e76ade4a1b7dba41ea5183a41d1aaf86076b3e869db72fc573 not found: ID does not exist" Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.223592 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.235814 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.243631 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 22:56:52 crc kubenswrapper[4755]: E0202 22:56:52.244406 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd" containerName="nova-scheduler-scheduler" Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.244438 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd" containerName="nova-scheduler-scheduler" Feb 02 22:56:52 crc kubenswrapper[4755]: E0202 22:56:52.244461 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9d11d8c-3a46-46bd-b2cc-01f736ccb878" containerName="dnsmasq-dns" Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.244482 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9d11d8c-3a46-46bd-b2cc-01f736ccb878" containerName="dnsmasq-dns" Feb 02 22:56:52 crc kubenswrapper[4755]: E0202 22:56:52.244515 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9d11d8c-3a46-46bd-b2cc-01f736ccb878" containerName="init" Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.244524 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9d11d8c-3a46-46bd-b2cc-01f736ccb878" containerName="init" Feb 02 22:56:52 crc kubenswrapper[4755]: E0202 22:56:52.244542 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35a5dd6b-95f6-43c8-b929-bd370640aa10" containerName="nova-manage" Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.244555 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="35a5dd6b-95f6-43c8-b929-bd370640aa10" containerName="nova-manage" Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.244857 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="35a5dd6b-95f6-43c8-b929-bd370640aa10" containerName="nova-manage" Feb 02 22:56:52 crc 
kubenswrapper[4755]: I0202 22:56:52.244922 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9d11d8c-3a46-46bd-b2cc-01f736ccb878" containerName="dnsmasq-dns" Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.244942 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd" containerName="nova-scheduler-scheduler" Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.245892 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.249939 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.256108 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.376029 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnv94\" (UniqueName: \"kubernetes.io/projected/63990686-ba43-4ee1-8e18-6855cccae33b-kube-api-access-lnv94\") pod \"nova-scheduler-0\" (UID: \"63990686-ba43-4ee1-8e18-6855cccae33b\") " pod="openstack/nova-scheduler-0" Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.376078 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63990686-ba43-4ee1-8e18-6855cccae33b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"63990686-ba43-4ee1-8e18-6855cccae33b\") " pod="openstack/nova-scheduler-0" Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.376129 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63990686-ba43-4ee1-8e18-6855cccae33b-config-data\") pod \"nova-scheduler-0\" (UID: \"63990686-ba43-4ee1-8e18-6855cccae33b\") " pod="openstack/nova-scheduler-0" Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.477884 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnv94\" (UniqueName: \"kubernetes.io/projected/63990686-ba43-4ee1-8e18-6855cccae33b-kube-api-access-lnv94\") pod \"nova-scheduler-0\" (UID: \"63990686-ba43-4ee1-8e18-6855cccae33b\") " pod="openstack/nova-scheduler-0" Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.477962 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63990686-ba43-4ee1-8e18-6855cccae33b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"63990686-ba43-4ee1-8e18-6855cccae33b\") " pod="openstack/nova-scheduler-0" Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.477997 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63990686-ba43-4ee1-8e18-6855cccae33b-config-data\") pod \"nova-scheduler-0\" (UID: \"63990686-ba43-4ee1-8e18-6855cccae33b\") " pod="openstack/nova-scheduler-0" Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.482853 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63990686-ba43-4ee1-8e18-6855cccae33b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"63990686-ba43-4ee1-8e18-6855cccae33b\") " pod="openstack/nova-scheduler-0" Feb 02 22:56:52 crc 
kubenswrapper[4755]: I0202 22:56:52.483243 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63990686-ba43-4ee1-8e18-6855cccae33b-config-data\") pod \"nova-scheduler-0\" (UID: \"63990686-ba43-4ee1-8e18-6855cccae33b\") " pod="openstack/nova-scheduler-0" Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.493881 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnv94\" (UniqueName: \"kubernetes.io/projected/63990686-ba43-4ee1-8e18-6855cccae33b-kube-api-access-lnv94\") pod \"nova-scheduler-0\" (UID: \"63990686-ba43-4ee1-8e18-6855cccae33b\") " pod="openstack/nova-scheduler-0" Feb 02 22:56:52 crc kubenswrapper[4755]: I0202 22:56:52.570834 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 02 22:56:53 crc kubenswrapper[4755]: I0202 22:56:53.065390 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 02 22:56:53 crc kubenswrapper[4755]: W0202 22:56:53.068173 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod63990686_ba43_4ee1_8e18_6855cccae33b.slice/crio-7fcda46bb95161df8ba6f393e1920b5b9d952c675bb3703d2c448cba6f706e21 WatchSource:0}: Error finding container 7fcda46bb95161df8ba6f393e1920b5b9d952c675bb3703d2c448cba6f706e21: Status 404 returned error can't find the container with id 7fcda46bb95161df8ba6f393e1920b5b9d952c675bb3703d2c448cba6f706e21 Feb 02 22:56:53 crc kubenswrapper[4755]: I0202 22:56:53.089212 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd" path="/var/lib/kubelet/pods/2e0ddb8c-410d-46c8-8a7e-a0d901fd31fd/volumes" Feb 02 22:56:53 crc kubenswrapper[4755]: I0202 22:56:53.194479 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"63990686-ba43-4ee1-8e18-6855cccae33b","Type":"ContainerStarted","Data":"7fcda46bb95161df8ba6f393e1920b5b9d952c675bb3703d2c448cba6f706e21"} Feb 02 22:56:53 crc kubenswrapper[4755]: I0202 22:56:53.389881 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 22:56:53 crc kubenswrapper[4755]: I0202 22:56:53.389935 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 22:56:53 crc kubenswrapper[4755]: I0202 22:56:53.934340 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="314d7c05-3b1b-4fe8-93f0-32ab7c761fe6" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.222:8775/\": read tcp 10.217.0.2:53596->10.217.0.222:8775: read: connection reset by peer" Feb 02 22:56:53 crc kubenswrapper[4755]: I0202 22:56:53.934369 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="314d7c05-3b1b-4fe8-93f0-32ab7c761fe6" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.222:8775/\": read tcp 
10.217.0.2:53588->10.217.0.222:8775: read: connection reset by peer" Feb 02 22:56:54 crc kubenswrapper[4755]: I0202 22:56:54.209325 4755 generic.go:334] "Generic (PLEG): container finished" podID="314d7c05-3b1b-4fe8-93f0-32ab7c761fe6" containerID="7316a42b2c095cd11c736b2760f5f4866edbf63c1a5df1fc7145e44ca64bd1a6" exitCode=0 Feb 02 22:56:54 crc kubenswrapper[4755]: I0202 22:56:54.209702 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6","Type":"ContainerDied","Data":"7316a42b2c095cd11c736b2760f5f4866edbf63c1a5df1fc7145e44ca64bd1a6"} Feb 02 22:56:54 crc kubenswrapper[4755]: I0202 22:56:54.216706 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"63990686-ba43-4ee1-8e18-6855cccae33b","Type":"ContainerStarted","Data":"d3d10ee5c8403e8c224782be7e8f1cec2e7cc59fab99a957d7dfdaa428a5945f"} Feb 02 22:56:54 crc kubenswrapper[4755]: I0202 22:56:54.249214 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.249187244 podStartE2EDuration="2.249187244s" podCreationTimestamp="2026-02-02 22:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:56:54.242041613 +0000 UTC m=+1369.933261939" watchObservedRunningTime="2026-02-02 22:56:54.249187244 +0000 UTC m=+1369.940407570" Feb 02 22:56:54 crc kubenswrapper[4755]: I0202 22:56:54.440925 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 22:56:54 crc kubenswrapper[4755]: I0202 22:56:54.532241 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-config-data\") pod \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\" (UID: \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\") " Feb 02 22:56:54 crc kubenswrapper[4755]: I0202 22:56:54.532389 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q49ld\" (UniqueName: \"kubernetes.io/projected/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-kube-api-access-q49ld\") pod \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\" (UID: \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\") " Feb 02 22:56:54 crc kubenswrapper[4755]: I0202 22:56:54.532473 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-nova-metadata-tls-certs\") pod \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\" (UID: \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\") " Feb 02 22:56:54 crc kubenswrapper[4755]: I0202 22:56:54.532526 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-combined-ca-bundle\") pod \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\" (UID: \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\") " Feb 02 22:56:54 crc kubenswrapper[4755]: I0202 22:56:54.532586 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-logs\") pod \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\" (UID: \"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6\") " Feb 02 22:56:54 crc kubenswrapper[4755]: I0202 22:56:54.533319 4755 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-logs" (OuterVolumeSpecName: "logs") pod "314d7c05-3b1b-4fe8-93f0-32ab7c761fe6" (UID: "314d7c05-3b1b-4fe8-93f0-32ab7c761fe6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:56:54 crc kubenswrapper[4755]: I0202 22:56:54.538192 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-kube-api-access-q49ld" (OuterVolumeSpecName: "kube-api-access-q49ld") pod "314d7c05-3b1b-4fe8-93f0-32ab7c761fe6" (UID: "314d7c05-3b1b-4fe8-93f0-32ab7c761fe6"). InnerVolumeSpecName "kube-api-access-q49ld". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:56:54 crc kubenswrapper[4755]: I0202 22:56:54.575931 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-config-data" (OuterVolumeSpecName: "config-data") pod "314d7c05-3b1b-4fe8-93f0-32ab7c761fe6" (UID: "314d7c05-3b1b-4fe8-93f0-32ab7c761fe6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:54 crc kubenswrapper[4755]: I0202 22:56:54.593204 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "314d7c05-3b1b-4fe8-93f0-32ab7c761fe6" (UID: "314d7c05-3b1b-4fe8-93f0-32ab7c761fe6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:54 crc kubenswrapper[4755]: I0202 22:56:54.606737 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "314d7c05-3b1b-4fe8-93f0-32ab7c761fe6" (UID: "314d7c05-3b1b-4fe8-93f0-32ab7c761fe6"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:54 crc kubenswrapper[4755]: I0202 22:56:54.635286 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:54 crc kubenswrapper[4755]: I0202 22:56:54.635345 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q49ld\" (UniqueName: \"kubernetes.io/projected/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-kube-api-access-q49ld\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:54 crc kubenswrapper[4755]: I0202 22:56:54.635357 4755 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:54 crc kubenswrapper[4755]: I0202 22:56:54.635365 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:54 crc kubenswrapper[4755]: I0202 22:56:54.635374 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6-logs\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.229090 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.229111 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"314d7c05-3b1b-4fe8-93f0-32ab7c761fe6","Type":"ContainerDied","Data":"87efb7a547a45db2d9f5ecbdb1044ad3b89c27784bde59fe02c7258830ac2289"} Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.230281 4755 scope.go:117] "RemoveContainer" containerID="7316a42b2c095cd11c736b2760f5f4866edbf63c1a5df1fc7145e44ca64bd1a6" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.255781 4755 scope.go:117] "RemoveContainer" containerID="68c9dce7fd52a56c8b74a82b43ffca965bfe2e63bed9fb7e4fd1976b68e3a2ff" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.265397 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.287366 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.296835 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 02 22:56:55 crc kubenswrapper[4755]: E0202 22:56:55.297310 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="314d7c05-3b1b-4fe8-93f0-32ab7c761fe6" containerName="nova-metadata-log" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.297325 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="314d7c05-3b1b-4fe8-93f0-32ab7c761fe6" containerName="nova-metadata-log" Feb 02 22:56:55 crc kubenswrapper[4755]: E0202 22:56:55.297366 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="314d7c05-3b1b-4fe8-93f0-32ab7c761fe6" containerName="nova-metadata-metadata" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.297374 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="314d7c05-3b1b-4fe8-93f0-32ab7c761fe6" containerName="nova-metadata-metadata" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 
22:56:55.297608 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="314d7c05-3b1b-4fe8-93f0-32ab7c761fe6" containerName="nova-metadata-log" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.297640 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="314d7c05-3b1b-4fe8-93f0-32ab7c761fe6" containerName="nova-metadata-metadata" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.299176 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.302056 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.302292 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.303479 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.451851 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/140c16ab-7ad3-45f6-8e89-edad569ee119-config-data\") pod \"nova-metadata-0\" (UID: \"140c16ab-7ad3-45f6-8e89-edad569ee119\") " pod="openstack/nova-metadata-0" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.451932 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6s7m\" (UniqueName: \"kubernetes.io/projected/140c16ab-7ad3-45f6-8e89-edad569ee119-kube-api-access-c6s7m\") pod \"nova-metadata-0\" (UID: \"140c16ab-7ad3-45f6-8e89-edad569ee119\") " pod="openstack/nova-metadata-0" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.452024 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/140c16ab-7ad3-45f6-8e89-edad569ee119-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"140c16ab-7ad3-45f6-8e89-edad569ee119\") " pod="openstack/nova-metadata-0" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.452060 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/140c16ab-7ad3-45f6-8e89-edad569ee119-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"140c16ab-7ad3-45f6-8e89-edad569ee119\") " pod="openstack/nova-metadata-0" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.452087 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/140c16ab-7ad3-45f6-8e89-edad569ee119-logs\") pod \"nova-metadata-0\" (UID: \"140c16ab-7ad3-45f6-8e89-edad569ee119\") " pod="openstack/nova-metadata-0" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.554109 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/140c16ab-7ad3-45f6-8e89-edad569ee119-config-data\") pod \"nova-metadata-0\" (UID: \"140c16ab-7ad3-45f6-8e89-edad569ee119\") " pod="openstack/nova-metadata-0" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.554204 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6s7m\" (UniqueName: 
\"kubernetes.io/projected/140c16ab-7ad3-45f6-8e89-edad569ee119-kube-api-access-c6s7m\") pod \"nova-metadata-0\" (UID: \"140c16ab-7ad3-45f6-8e89-edad569ee119\") " pod="openstack/nova-metadata-0" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.554258 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/140c16ab-7ad3-45f6-8e89-edad569ee119-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"140c16ab-7ad3-45f6-8e89-edad569ee119\") " pod="openstack/nova-metadata-0" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.554287 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/140c16ab-7ad3-45f6-8e89-edad569ee119-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"140c16ab-7ad3-45f6-8e89-edad569ee119\") " pod="openstack/nova-metadata-0" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.554311 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/140c16ab-7ad3-45f6-8e89-edad569ee119-logs\") pod \"nova-metadata-0\" (UID: \"140c16ab-7ad3-45f6-8e89-edad569ee119\") " pod="openstack/nova-metadata-0" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.554967 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/140c16ab-7ad3-45f6-8e89-edad569ee119-logs\") pod \"nova-metadata-0\" (UID: \"140c16ab-7ad3-45f6-8e89-edad569ee119\") " pod="openstack/nova-metadata-0" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.561856 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/140c16ab-7ad3-45f6-8e89-edad569ee119-config-data\") pod \"nova-metadata-0\" (UID: \"140c16ab-7ad3-45f6-8e89-edad569ee119\") " pod="openstack/nova-metadata-0" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.562206 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/140c16ab-7ad3-45f6-8e89-edad569ee119-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"140c16ab-7ad3-45f6-8e89-edad569ee119\") " pod="openstack/nova-metadata-0" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.565818 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/140c16ab-7ad3-45f6-8e89-edad569ee119-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"140c16ab-7ad3-45f6-8e89-edad569ee119\") " pod="openstack/nova-metadata-0" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.575898 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6s7m\" (UniqueName: \"kubernetes.io/projected/140c16ab-7ad3-45f6-8e89-edad569ee119-kube-api-access-c6s7m\") pod \"nova-metadata-0\" (UID: \"140c16ab-7ad3-45f6-8e89-edad569ee119\") " pod="openstack/nova-metadata-0" Feb 02 22:56:55 crc kubenswrapper[4755]: I0202 22:56:55.618626 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 02 22:56:56 crc kubenswrapper[4755]: I0202 22:56:56.123785 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 02 22:56:56 crc kubenswrapper[4755]: I0202 22:56:56.239332 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"140c16ab-7ad3-45f6-8e89-edad569ee119","Type":"ContainerStarted","Data":"1a52fd85bda7d5ac5f87b8686a1eaca08b1799c963fd718eab740548418c1158"} Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.004354 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.087560 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="314d7c05-3b1b-4fe8-93f0-32ab7c761fe6" path="/var/lib/kubelet/pods/314d7c05-3b1b-4fe8-93f0-32ab7c761fe6/volumes" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.131383 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-combined-ca-bundle\") pod \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.131467 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-public-tls-certs\") pod \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.131536 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-internal-tls-certs\") pod \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.131608 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7njr9\" (UniqueName: \"kubernetes.io/projected/3b23f3b7-a57f-4ae3-8760-45af18159a6d-kube-api-access-7njr9\") pod \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.131687 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-config-data\") pod \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.131718 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b23f3b7-a57f-4ae3-8760-45af18159a6d-logs\") pod \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\" (UID: \"3b23f3b7-a57f-4ae3-8760-45af18159a6d\") " Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.132996 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b23f3b7-a57f-4ae3-8760-45af18159a6d-logs" (OuterVolumeSpecName: "logs") pod "3b23f3b7-a57f-4ae3-8760-45af18159a6d" (UID: "3b23f3b7-a57f-4ae3-8760-45af18159a6d"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.136939 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b23f3b7-a57f-4ae3-8760-45af18159a6d-kube-api-access-7njr9" (OuterVolumeSpecName: "kube-api-access-7njr9") pod "3b23f3b7-a57f-4ae3-8760-45af18159a6d" (UID: "3b23f3b7-a57f-4ae3-8760-45af18159a6d"). InnerVolumeSpecName "kube-api-access-7njr9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.161682 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3b23f3b7-a57f-4ae3-8760-45af18159a6d" (UID: "3b23f3b7-a57f-4ae3-8760-45af18159a6d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.167360 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-config-data" (OuterVolumeSpecName: "config-data") pod "3b23f3b7-a57f-4ae3-8760-45af18159a6d" (UID: "3b23f3b7-a57f-4ae3-8760-45af18159a6d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.188646 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "3b23f3b7-a57f-4ae3-8760-45af18159a6d" (UID: "3b23f3b7-a57f-4ae3-8760-45af18159a6d"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.211196 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "3b23f3b7-a57f-4ae3-8760-45af18159a6d" (UID: "3b23f3b7-a57f-4ae3-8760-45af18159a6d"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.234289 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.234327 4755 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.234339 4755 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.234352 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7njr9\" (UniqueName: \"kubernetes.io/projected/3b23f3b7-a57f-4ae3-8760-45af18159a6d-kube-api-access-7njr9\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.234368 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b23f3b7-a57f-4ae3-8760-45af18159a6d-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.234380 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b23f3b7-a57f-4ae3-8760-45af18159a6d-logs\") on node \"crc\" DevicePath \"\"" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.263598 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"140c16ab-7ad3-45f6-8e89-edad569ee119","Type":"ContainerStarted","Data":"5363d9fe122a51babd504e50a00430c18bf41d61a695b344db21734170cd8abb"} Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.263653 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"140c16ab-7ad3-45f6-8e89-edad569ee119","Type":"ContainerStarted","Data":"e7fa3cfd6c0397e76cae49aa01dd2c234319c3fa6057b6284a6848b3f0b31232"} Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.266984 4755 generic.go:334] "Generic (PLEG): container finished" podID="3b23f3b7-a57f-4ae3-8760-45af18159a6d" containerID="66f6fb2a5cf53bd6c5e46675754b815aba62de1ec1c84eeeeb23d082406262da" exitCode=0 Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.267026 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3b23f3b7-a57f-4ae3-8760-45af18159a6d","Type":"ContainerDied","Data":"66f6fb2a5cf53bd6c5e46675754b815aba62de1ec1c84eeeeb23d082406262da"} Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.267052 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3b23f3b7-a57f-4ae3-8760-45af18159a6d","Type":"ContainerDied","Data":"8281e82a29a560d6b46043e8a35f1692da232496091cb82b630fc8a0586f72f1"} Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.267073 4755 scope.go:117] "RemoveContainer" containerID="66f6fb2a5cf53bd6c5e46675754b815aba62de1ec1c84eeeeb23d082406262da" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.267209 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.291846 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.291827564 podStartE2EDuration="2.291827564s" podCreationTimestamp="2026-02-02 22:56:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:56:57.282401049 +0000 UTC m=+1372.973621385" watchObservedRunningTime="2026-02-02 22:56:57.291827564 +0000 UTC m=+1372.983047890" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.299695 4755 scope.go:117] "RemoveContainer" containerID="c1a163487e815594614b3b68ccbea9ca347295984cc23e068a013e6a5a22b797" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.327823 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.339778 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.361512 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 02 22:56:57 crc kubenswrapper[4755]: E0202 22:56:57.362031 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b23f3b7-a57f-4ae3-8760-45af18159a6d" containerName="nova-api-api" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.362052 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b23f3b7-a57f-4ae3-8760-45af18159a6d" containerName="nova-api-api" Feb 02 22:56:57 crc kubenswrapper[4755]: E0202 22:56:57.362069 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b23f3b7-a57f-4ae3-8760-45af18159a6d" containerName="nova-api-log" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.362076 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b23f3b7-a57f-4ae3-8760-45af18159a6d" containerName="nova-api-log" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.362280 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b23f3b7-a57f-4ae3-8760-45af18159a6d" containerName="nova-api-api" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.362311 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b23f3b7-a57f-4ae3-8760-45af18159a6d" containerName="nova-api-log" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.363535 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.366232 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.366286 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.366816 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.372529 4755 scope.go:117] "RemoveContainer" containerID="66f6fb2a5cf53bd6c5e46675754b815aba62de1ec1c84eeeeb23d082406262da" Feb 02 22:56:57 crc kubenswrapper[4755]: E0202 22:56:57.374030 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66f6fb2a5cf53bd6c5e46675754b815aba62de1ec1c84eeeeb23d082406262da\": container with ID starting with 66f6fb2a5cf53bd6c5e46675754b815aba62de1ec1c84eeeeb23d082406262da not found: ID does not exist" containerID="66f6fb2a5cf53bd6c5e46675754b815aba62de1ec1c84eeeeb23d082406262da" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.374063 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66f6fb2a5cf53bd6c5e46675754b815aba62de1ec1c84eeeeb23d082406262da"} err="failed to get container status \"66f6fb2a5cf53bd6c5e46675754b815aba62de1ec1c84eeeeb23d082406262da\": rpc error: code = NotFound desc = could not find container \"66f6fb2a5cf53bd6c5e46675754b815aba62de1ec1c84eeeeb23d082406262da\": container with ID starting with 66f6fb2a5cf53bd6c5e46675754b815aba62de1ec1c84eeeeb23d082406262da not found: ID does not exist" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.374082 4755 scope.go:117] "RemoveContainer" containerID="c1a163487e815594614b3b68ccbea9ca347295984cc23e068a013e6a5a22b797" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.374539 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 22:56:57 crc kubenswrapper[4755]: E0202 22:56:57.374772 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1a163487e815594614b3b68ccbea9ca347295984cc23e068a013e6a5a22b797\": container with ID starting with c1a163487e815594614b3b68ccbea9ca347295984cc23e068a013e6a5a22b797 not found: ID does not exist" containerID="c1a163487e815594614b3b68ccbea9ca347295984cc23e068a013e6a5a22b797" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.374798 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1a163487e815594614b3b68ccbea9ca347295984cc23e068a013e6a5a22b797"} err="failed to get container status \"c1a163487e815594614b3b68ccbea9ca347295984cc23e068a013e6a5a22b797\": rpc error: code = NotFound desc = could not find container \"c1a163487e815594614b3b68ccbea9ca347295984cc23e068a013e6a5a22b797\": container with ID starting with c1a163487e815594614b3b68ccbea9ca347295984cc23e068a013e6a5a22b797 not found: ID does not exist" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.539760 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2835e13-0c5c-4211-935d-0b3bd14a5aca-public-tls-certs\") pod \"nova-api-0\" (UID: \"f2835e13-0c5c-4211-935d-0b3bd14a5aca\") " pod="openstack/nova-api-0" Feb 02 
22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.539836 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2835e13-0c5c-4211-935d-0b3bd14a5aca-internal-tls-certs\") pod \"nova-api-0\" (UID: \"f2835e13-0c5c-4211-935d-0b3bd14a5aca\") " pod="openstack/nova-api-0" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.539964 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2835e13-0c5c-4211-935d-0b3bd14a5aca-logs\") pod \"nova-api-0\" (UID: \"f2835e13-0c5c-4211-935d-0b3bd14a5aca\") " pod="openstack/nova-api-0" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.540200 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vs85\" (UniqueName: \"kubernetes.io/projected/f2835e13-0c5c-4211-935d-0b3bd14a5aca-kube-api-access-7vs85\") pod \"nova-api-0\" (UID: \"f2835e13-0c5c-4211-935d-0b3bd14a5aca\") " pod="openstack/nova-api-0" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.540236 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2835e13-0c5c-4211-935d-0b3bd14a5aca-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f2835e13-0c5c-4211-935d-0b3bd14a5aca\") " pod="openstack/nova-api-0" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.540292 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2835e13-0c5c-4211-935d-0b3bd14a5aca-config-data\") pod \"nova-api-0\" (UID: \"f2835e13-0c5c-4211-935d-0b3bd14a5aca\") " pod="openstack/nova-api-0" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.572609 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.642415 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vs85\" (UniqueName: \"kubernetes.io/projected/f2835e13-0c5c-4211-935d-0b3bd14a5aca-kube-api-access-7vs85\") pod \"nova-api-0\" (UID: \"f2835e13-0c5c-4211-935d-0b3bd14a5aca\") " pod="openstack/nova-api-0" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.642490 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2835e13-0c5c-4211-935d-0b3bd14a5aca-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f2835e13-0c5c-4211-935d-0b3bd14a5aca\") " pod="openstack/nova-api-0" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.642541 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2835e13-0c5c-4211-935d-0b3bd14a5aca-config-data\") pod \"nova-api-0\" (UID: \"f2835e13-0c5c-4211-935d-0b3bd14a5aca\") " pod="openstack/nova-api-0" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.642789 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2835e13-0c5c-4211-935d-0b3bd14a5aca-public-tls-certs\") pod \"nova-api-0\" (UID: \"f2835e13-0c5c-4211-935d-0b3bd14a5aca\") " pod="openstack/nova-api-0" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.642836 4755 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2835e13-0c5c-4211-935d-0b3bd14a5aca-internal-tls-certs\") pod \"nova-api-0\" (UID: \"f2835e13-0c5c-4211-935d-0b3bd14a5aca\") " pod="openstack/nova-api-0" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.642889 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2835e13-0c5c-4211-935d-0b3bd14a5aca-logs\") pod \"nova-api-0\" (UID: \"f2835e13-0c5c-4211-935d-0b3bd14a5aca\") " pod="openstack/nova-api-0" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.643529 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2835e13-0c5c-4211-935d-0b3bd14a5aca-logs\") pod \"nova-api-0\" (UID: \"f2835e13-0c5c-4211-935d-0b3bd14a5aca\") " pod="openstack/nova-api-0" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.651072 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2835e13-0c5c-4211-935d-0b3bd14a5aca-config-data\") pod \"nova-api-0\" (UID: \"f2835e13-0c5c-4211-935d-0b3bd14a5aca\") " pod="openstack/nova-api-0" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.652323 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2835e13-0c5c-4211-935d-0b3bd14a5aca-public-tls-certs\") pod \"nova-api-0\" (UID: \"f2835e13-0c5c-4211-935d-0b3bd14a5aca\") " pod="openstack/nova-api-0" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.654820 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2835e13-0c5c-4211-935d-0b3bd14a5aca-internal-tls-certs\") pod \"nova-api-0\" (UID: \"f2835e13-0c5c-4211-935d-0b3bd14a5aca\") " pod="openstack/nova-api-0" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.655693 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2835e13-0c5c-4211-935d-0b3bd14a5aca-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f2835e13-0c5c-4211-935d-0b3bd14a5aca\") " pod="openstack/nova-api-0" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.687840 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vs85\" (UniqueName: \"kubernetes.io/projected/f2835e13-0c5c-4211-935d-0b3bd14a5aca-kube-api-access-7vs85\") pod \"nova-api-0\" (UID: \"f2835e13-0c5c-4211-935d-0b3bd14a5aca\") " pod="openstack/nova-api-0" Feb 02 22:56:57 crc kubenswrapper[4755]: I0202 22:56:57.690045 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 02 22:56:58 crc kubenswrapper[4755]: W0202 22:56:58.211026 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf2835e13_0c5c_4211_935d_0b3bd14a5aca.slice/crio-35e5c5731086b1c3f67e8010a19e263ee27f36e2eda59b15668cd1927907440e WatchSource:0}: Error finding container 35e5c5731086b1c3f67e8010a19e263ee27f36e2eda59b15668cd1927907440e: Status 404 returned error can't find the container with id 35e5c5731086b1c3f67e8010a19e263ee27f36e2eda59b15668cd1927907440e Feb 02 22:56:58 crc kubenswrapper[4755]: I0202 22:56:58.216205 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 02 22:56:58 crc kubenswrapper[4755]: I0202 22:56:58.281405 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f2835e13-0c5c-4211-935d-0b3bd14a5aca","Type":"ContainerStarted","Data":"35e5c5731086b1c3f67e8010a19e263ee27f36e2eda59b15668cd1927907440e"} Feb 02 22:56:59 crc kubenswrapper[4755]: I0202 22:56:59.082045 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b23f3b7-a57f-4ae3-8760-45af18159a6d" path="/var/lib/kubelet/pods/3b23f3b7-a57f-4ae3-8760-45af18159a6d/volumes" Feb 02 22:56:59 crc kubenswrapper[4755]: I0202 22:56:59.310688 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f2835e13-0c5c-4211-935d-0b3bd14a5aca","Type":"ContainerStarted","Data":"1a5ff3c8d54f44a0dbd1c6586214a14d6668372b817bcedb0d9fac8383ae10a2"} Feb 02 22:56:59 crc kubenswrapper[4755]: I0202 22:56:59.310781 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f2835e13-0c5c-4211-935d-0b3bd14a5aca","Type":"ContainerStarted","Data":"2c25e881606baca156cca3387abd063faac5110eca79058017bca9497a8ed606"} Feb 02 22:56:59 crc kubenswrapper[4755]: I0202 22:56:59.352790 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.352771137 podStartE2EDuration="2.352771137s" podCreationTimestamp="2026-02-02 22:56:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:56:59.340628176 +0000 UTC m=+1375.031848522" watchObservedRunningTime="2026-02-02 22:56:59.352771137 +0000 UTC m=+1375.043991463" Feb 02 22:57:00 crc kubenswrapper[4755]: I0202 22:57:00.619247 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 02 22:57:00 crc kubenswrapper[4755]: I0202 22:57:00.619673 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 02 22:57:02 crc kubenswrapper[4755]: I0202 22:57:02.572710 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 02 22:57:02 crc kubenswrapper[4755]: I0202 22:57:02.615309 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 02 22:57:03 crc kubenswrapper[4755]: I0202 22:57:03.380670 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 02 22:57:05 crc kubenswrapper[4755]: I0202 22:57:05.618802 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 02 22:57:05 crc kubenswrapper[4755]: I0202 22:57:05.619260 4755 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 02 22:57:06 crc kubenswrapper[4755]: I0202 22:57:06.642005 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="140c16ab-7ad3-45f6-8e89-edad569ee119" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.234:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 02 22:57:06 crc kubenswrapper[4755]: I0202 22:57:06.642156 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="140c16ab-7ad3-45f6-8e89-edad569ee119" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.234:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 02 22:57:07 crc kubenswrapper[4755]: I0202 22:57:07.690950 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 22:57:07 crc kubenswrapper[4755]: I0202 22:57:07.691244 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 02 22:57:08 crc kubenswrapper[4755]: I0202 22:57:08.703896 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f2835e13-0c5c-4211-935d-0b3bd14a5aca" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.235:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 02 22:57:08 crc kubenswrapper[4755]: I0202 22:57:08.703944 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f2835e13-0c5c-4211-935d-0b3bd14a5aca" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.235:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 02 22:57:09 crc kubenswrapper[4755]: I0202 22:57:09.641543 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 02 22:57:15 crc kubenswrapper[4755]: I0202 22:57:15.626085 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 02 22:57:15 crc kubenswrapper[4755]: I0202 22:57:15.630719 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 02 22:57:15 crc kubenswrapper[4755]: I0202 22:57:15.632213 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 02 22:57:16 crc kubenswrapper[4755]: I0202 22:57:16.510281 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 02 22:57:17 crc kubenswrapper[4755]: I0202 22:57:17.702562 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 02 22:57:17 crc kubenswrapper[4755]: I0202 22:57:17.704040 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 02 22:57:17 crc kubenswrapper[4755]: I0202 22:57:17.705931 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 02 22:57:17 crc kubenswrapper[4755]: I0202 22:57:17.706529 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 02 22:57:17 crc kubenswrapper[4755]: I0202 22:57:17.713924 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 02 22:57:17 crc 
kubenswrapper[4755]: I0202 22:57:17.718185 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 02 22:57:23 crc kubenswrapper[4755]: I0202 22:57:23.032831 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rfqj4"] Feb 02 22:57:23 crc kubenswrapper[4755]: I0202 22:57:23.036716 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rfqj4" Feb 02 22:57:23 crc kubenswrapper[4755]: I0202 22:57:23.048370 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rfqj4"] Feb 02 22:57:23 crc kubenswrapper[4755]: I0202 22:57:23.163381 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fntp4\" (UniqueName: \"kubernetes.io/projected/6d72a2c1-c4d9-4f54-8ca1-aa44762ae943-kube-api-access-fntp4\") pod \"redhat-operators-rfqj4\" (UID: \"6d72a2c1-c4d9-4f54-8ca1-aa44762ae943\") " pod="openshift-marketplace/redhat-operators-rfqj4" Feb 02 22:57:23 crc kubenswrapper[4755]: I0202 22:57:23.163553 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d72a2c1-c4d9-4f54-8ca1-aa44762ae943-catalog-content\") pod \"redhat-operators-rfqj4\" (UID: \"6d72a2c1-c4d9-4f54-8ca1-aa44762ae943\") " pod="openshift-marketplace/redhat-operators-rfqj4" Feb 02 22:57:23 crc kubenswrapper[4755]: I0202 22:57:23.163604 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d72a2c1-c4d9-4f54-8ca1-aa44762ae943-utilities\") pod \"redhat-operators-rfqj4\" (UID: \"6d72a2c1-c4d9-4f54-8ca1-aa44762ae943\") " pod="openshift-marketplace/redhat-operators-rfqj4" Feb 02 22:57:23 crc kubenswrapper[4755]: I0202 22:57:23.266122 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fntp4\" (UniqueName: \"kubernetes.io/projected/6d72a2c1-c4d9-4f54-8ca1-aa44762ae943-kube-api-access-fntp4\") pod \"redhat-operators-rfqj4\" (UID: \"6d72a2c1-c4d9-4f54-8ca1-aa44762ae943\") " pod="openshift-marketplace/redhat-operators-rfqj4" Feb 02 22:57:23 crc kubenswrapper[4755]: I0202 22:57:23.266239 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d72a2c1-c4d9-4f54-8ca1-aa44762ae943-catalog-content\") pod \"redhat-operators-rfqj4\" (UID: \"6d72a2c1-c4d9-4f54-8ca1-aa44762ae943\") " pod="openshift-marketplace/redhat-operators-rfqj4" Feb 02 22:57:23 crc kubenswrapper[4755]: I0202 22:57:23.266280 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d72a2c1-c4d9-4f54-8ca1-aa44762ae943-utilities\") pod \"redhat-operators-rfqj4\" (UID: \"6d72a2c1-c4d9-4f54-8ca1-aa44762ae943\") " pod="openshift-marketplace/redhat-operators-rfqj4" Feb 02 22:57:23 crc kubenswrapper[4755]: I0202 22:57:23.266969 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d72a2c1-c4d9-4f54-8ca1-aa44762ae943-utilities\") pod \"redhat-operators-rfqj4\" (UID: \"6d72a2c1-c4d9-4f54-8ca1-aa44762ae943\") " pod="openshift-marketplace/redhat-operators-rfqj4" Feb 02 22:57:23 crc kubenswrapper[4755]: I0202 22:57:23.267013 4755 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d72a2c1-c4d9-4f54-8ca1-aa44762ae943-catalog-content\") pod \"redhat-operators-rfqj4\" (UID: \"6d72a2c1-c4d9-4f54-8ca1-aa44762ae943\") " pod="openshift-marketplace/redhat-operators-rfqj4" Feb 02 22:57:23 crc kubenswrapper[4755]: I0202 22:57:23.291368 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fntp4\" (UniqueName: \"kubernetes.io/projected/6d72a2c1-c4d9-4f54-8ca1-aa44762ae943-kube-api-access-fntp4\") pod \"redhat-operators-rfqj4\" (UID: \"6d72a2c1-c4d9-4f54-8ca1-aa44762ae943\") " pod="openshift-marketplace/redhat-operators-rfqj4" Feb 02 22:57:23 crc kubenswrapper[4755]: I0202 22:57:23.388939 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 22:57:23 crc kubenswrapper[4755]: I0202 22:57:23.389215 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 22:57:23 crc kubenswrapper[4755]: I0202 22:57:23.389313 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" Feb 02 22:57:23 crc kubenswrapper[4755]: I0202 22:57:23.390089 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e1a0edb6bc3318168553c3186dbd5ca8239787806078b7f1d8e7cf50cd938918"} pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 22:57:23 crc kubenswrapper[4755]: I0202 22:57:23.390234 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" containerID="cri-o://e1a0edb6bc3318168553c3186dbd5ca8239787806078b7f1d8e7cf50cd938918" gracePeriod=600 Feb 02 22:57:23 crc kubenswrapper[4755]: I0202 22:57:23.404932 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rfqj4" Feb 02 22:57:23 crc kubenswrapper[4755]: I0202 22:57:23.611536 4755 generic.go:334] "Generic (PLEG): container finished" podID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerID="e1a0edb6bc3318168553c3186dbd5ca8239787806078b7f1d8e7cf50cd938918" exitCode=0 Feb 02 22:57:23 crc kubenswrapper[4755]: I0202 22:57:23.611867 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerDied","Data":"e1a0edb6bc3318168553c3186dbd5ca8239787806078b7f1d8e7cf50cd938918"} Feb 02 22:57:23 crc kubenswrapper[4755]: I0202 22:57:23.611905 4755 scope.go:117] "RemoveContainer" containerID="b7878a61f8677fe4ed7b8526051e4c43447e019572d069fa0c208b41ce260865" Feb 02 22:57:23 crc kubenswrapper[4755]: I0202 22:57:23.962623 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rfqj4"] Feb 02 22:57:23 crc kubenswrapper[4755]: W0202 22:57:23.964132 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6d72a2c1_c4d9_4f54_8ca1_aa44762ae943.slice/crio-b365daf56b027d260d8c13f994e6284c54135f001a9c2ca6fcf60da882c8e58a WatchSource:0}: Error finding container b365daf56b027d260d8c13f994e6284c54135f001a9c2ca6fcf60da882c8e58a: Status 404 returned error can't find the container with id b365daf56b027d260d8c13f994e6284c54135f001a9c2ca6fcf60da882c8e58a Feb 02 22:57:24 crc kubenswrapper[4755]: I0202 22:57:24.623551 4755 generic.go:334] "Generic (PLEG): container finished" podID="6d72a2c1-c4d9-4f54-8ca1-aa44762ae943" containerID="f6776e005cbf09f4e35fa1fbf20523eaab8931899a9b5c8b893fd62bc3ab503a" exitCode=0 Feb 02 22:57:24 crc kubenswrapper[4755]: I0202 22:57:24.623763 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rfqj4" event={"ID":"6d72a2c1-c4d9-4f54-8ca1-aa44762ae943","Type":"ContainerDied","Data":"f6776e005cbf09f4e35fa1fbf20523eaab8931899a9b5c8b893fd62bc3ab503a"} Feb 02 22:57:24 crc kubenswrapper[4755]: I0202 22:57:24.625301 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rfqj4" event={"ID":"6d72a2c1-c4d9-4f54-8ca1-aa44762ae943","Type":"ContainerStarted","Data":"b365daf56b027d260d8c13f994e6284c54135f001a9c2ca6fcf60da882c8e58a"} Feb 02 22:57:24 crc kubenswrapper[4755]: I0202 22:57:24.630607 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerStarted","Data":"335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63"} Feb 02 22:57:26 crc kubenswrapper[4755]: I0202 22:57:26.655120 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rfqj4" event={"ID":"6d72a2c1-c4d9-4f54-8ca1-aa44762ae943","Type":"ContainerStarted","Data":"e35c3b7030be3217cbbed55d1a767c2e67e274f40a17e5f96ddbf68bbf4cce20"} Feb 02 22:57:26 crc kubenswrapper[4755]: I0202 22:57:26.943097 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-db-sync-75zwb"] Feb 02 22:57:26 crc kubenswrapper[4755]: I0202 22:57:26.955082 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-db-sync-75zwb"] Feb 02 22:57:27 crc kubenswrapper[4755]: I0202 22:57:27.112428 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe" path="/var/lib/kubelet/pods/bb79e2ee-e3af-4a8a-bb3e-a8f0deac67fe/volumes" Feb 02 22:57:27 crc kubenswrapper[4755]: I0202 22:57:27.113078 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-db-sync-ltkhr"] Feb 02 22:57:27 crc kubenswrapper[4755]: I0202 22:57:27.131089 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-ltkhr" Feb 02 22:57:27 crc kubenswrapper[4755]: I0202 22:57:27.135747 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 02 22:57:27 crc kubenswrapper[4755]: I0202 22:57:27.159572 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-ltkhr"] Feb 02 22:57:27 crc kubenswrapper[4755]: I0202 22:57:27.290359 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkt7h\" (UniqueName: \"kubernetes.io/projected/909d96ec-6e4a-4012-b7bf-bb0f95815443-kube-api-access-lkt7h\") pod \"cloudkitty-db-sync-ltkhr\" (UID: \"909d96ec-6e4a-4012-b7bf-bb0f95815443\") " pod="openstack/cloudkitty-db-sync-ltkhr" Feb 02 22:57:27 crc kubenswrapper[4755]: I0202 22:57:27.290426 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/909d96ec-6e4a-4012-b7bf-bb0f95815443-combined-ca-bundle\") pod \"cloudkitty-db-sync-ltkhr\" (UID: \"909d96ec-6e4a-4012-b7bf-bb0f95815443\") " pod="openstack/cloudkitty-db-sync-ltkhr" Feb 02 22:57:27 crc kubenswrapper[4755]: I0202 22:57:27.290640 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/909d96ec-6e4a-4012-b7bf-bb0f95815443-config-data\") pod \"cloudkitty-db-sync-ltkhr\" (UID: \"909d96ec-6e4a-4012-b7bf-bb0f95815443\") " pod="openstack/cloudkitty-db-sync-ltkhr" Feb 02 22:57:27 crc kubenswrapper[4755]: I0202 22:57:27.290692 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/909d96ec-6e4a-4012-b7bf-bb0f95815443-scripts\") pod \"cloudkitty-db-sync-ltkhr\" (UID: \"909d96ec-6e4a-4012-b7bf-bb0f95815443\") " pod="openstack/cloudkitty-db-sync-ltkhr" Feb 02 22:57:27 crc kubenswrapper[4755]: I0202 22:57:27.290821 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/909d96ec-6e4a-4012-b7bf-bb0f95815443-certs\") pod \"cloudkitty-db-sync-ltkhr\" (UID: \"909d96ec-6e4a-4012-b7bf-bb0f95815443\") " pod="openstack/cloudkitty-db-sync-ltkhr" Feb 02 22:57:27 crc kubenswrapper[4755]: I0202 22:57:27.392603 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/909d96ec-6e4a-4012-b7bf-bb0f95815443-scripts\") pod \"cloudkitty-db-sync-ltkhr\" (UID: \"909d96ec-6e4a-4012-b7bf-bb0f95815443\") " pod="openstack/cloudkitty-db-sync-ltkhr" Feb 02 22:57:27 crc kubenswrapper[4755]: I0202 22:57:27.392946 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/909d96ec-6e4a-4012-b7bf-bb0f95815443-certs\") pod \"cloudkitty-db-sync-ltkhr\" (UID: \"909d96ec-6e4a-4012-b7bf-bb0f95815443\") " pod="openstack/cloudkitty-db-sync-ltkhr" Feb 02 22:57:27 crc kubenswrapper[4755]: I0202 22:57:27.393034 4755 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkt7h\" (UniqueName: \"kubernetes.io/projected/909d96ec-6e4a-4012-b7bf-bb0f95815443-kube-api-access-lkt7h\") pod \"cloudkitty-db-sync-ltkhr\" (UID: \"909d96ec-6e4a-4012-b7bf-bb0f95815443\") " pod="openstack/cloudkitty-db-sync-ltkhr" Feb 02 22:57:27 crc kubenswrapper[4755]: I0202 22:57:27.393080 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/909d96ec-6e4a-4012-b7bf-bb0f95815443-combined-ca-bundle\") pod \"cloudkitty-db-sync-ltkhr\" (UID: \"909d96ec-6e4a-4012-b7bf-bb0f95815443\") " pod="openstack/cloudkitty-db-sync-ltkhr" Feb 02 22:57:27 crc kubenswrapper[4755]: I0202 22:57:27.393190 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/909d96ec-6e4a-4012-b7bf-bb0f95815443-config-data\") pod \"cloudkitty-db-sync-ltkhr\" (UID: \"909d96ec-6e4a-4012-b7bf-bb0f95815443\") " pod="openstack/cloudkitty-db-sync-ltkhr" Feb 02 22:57:27 crc kubenswrapper[4755]: I0202 22:57:27.400006 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/909d96ec-6e4a-4012-b7bf-bb0f95815443-scripts\") pod \"cloudkitty-db-sync-ltkhr\" (UID: \"909d96ec-6e4a-4012-b7bf-bb0f95815443\") " pod="openstack/cloudkitty-db-sync-ltkhr" Feb 02 22:57:27 crc kubenswrapper[4755]: I0202 22:57:27.400361 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/909d96ec-6e4a-4012-b7bf-bb0f95815443-combined-ca-bundle\") pod \"cloudkitty-db-sync-ltkhr\" (UID: \"909d96ec-6e4a-4012-b7bf-bb0f95815443\") " pod="openstack/cloudkitty-db-sync-ltkhr" Feb 02 22:57:27 crc kubenswrapper[4755]: I0202 22:57:27.401942 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/909d96ec-6e4a-4012-b7bf-bb0f95815443-config-data\") pod \"cloudkitty-db-sync-ltkhr\" (UID: \"909d96ec-6e4a-4012-b7bf-bb0f95815443\") " pod="openstack/cloudkitty-db-sync-ltkhr" Feb 02 22:57:27 crc kubenswrapper[4755]: I0202 22:57:27.402624 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/909d96ec-6e4a-4012-b7bf-bb0f95815443-certs\") pod \"cloudkitty-db-sync-ltkhr\" (UID: \"909d96ec-6e4a-4012-b7bf-bb0f95815443\") " pod="openstack/cloudkitty-db-sync-ltkhr" Feb 02 22:57:27 crc kubenswrapper[4755]: I0202 22:57:27.409439 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkt7h\" (UniqueName: \"kubernetes.io/projected/909d96ec-6e4a-4012-b7bf-bb0f95815443-kube-api-access-lkt7h\") pod \"cloudkitty-db-sync-ltkhr\" (UID: \"909d96ec-6e4a-4012-b7bf-bb0f95815443\") " pod="openstack/cloudkitty-db-sync-ltkhr" Feb 02 22:57:27 crc kubenswrapper[4755]: I0202 22:57:27.494508 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-sync-ltkhr" Feb 02 22:57:27 crc kubenswrapper[4755]: W0202 22:57:27.932998 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod909d96ec_6e4a_4012_b7bf_bb0f95815443.slice/crio-3bb2c152c5444ce32572e37c6124452308d4e1e78c1ba4a8d75c0a6ee842adba WatchSource:0}: Error finding container 3bb2c152c5444ce32572e37c6124452308d4e1e78c1ba4a8d75c0a6ee842adba: Status 404 returned error can't find the container with id 3bb2c152c5444ce32572e37c6124452308d4e1e78c1ba4a8d75c0a6ee842adba Feb 02 22:57:27 crc kubenswrapper[4755]: I0202 22:57:27.939848 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-ltkhr"] Feb 02 22:57:28 crc kubenswrapper[4755]: I0202 22:57:28.625397 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 22:57:28 crc kubenswrapper[4755]: I0202 22:57:28.680360 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-ltkhr" event={"ID":"909d96ec-6e4a-4012-b7bf-bb0f95815443","Type":"ContainerStarted","Data":"d06a9fe7a85e3048a48fdd4742e37222e3b98e2591e4985cc68572ab8522c6a8"} Feb 02 22:57:28 crc kubenswrapper[4755]: I0202 22:57:28.680417 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-ltkhr" event={"ID":"909d96ec-6e4a-4012-b7bf-bb0f95815443","Type":"ContainerStarted","Data":"3bb2c152c5444ce32572e37c6124452308d4e1e78c1ba4a8d75c0a6ee842adba"} Feb 02 22:57:28 crc kubenswrapper[4755]: I0202 22:57:28.697738 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-db-sync-ltkhr" podStartSLOduration=1.429400776 podStartE2EDuration="1.697708523s" podCreationTimestamp="2026-02-02 22:57:27 +0000 UTC" firstStartedPulling="2026-02-02 22:57:27.935343117 +0000 UTC m=+1403.626563443" lastFinishedPulling="2026-02-02 22:57:28.203650854 +0000 UTC m=+1403.894871190" observedRunningTime="2026-02-02 22:57:28.696061566 +0000 UTC m=+1404.387281892" watchObservedRunningTime="2026-02-02 22:57:28.697708523 +0000 UTC m=+1404.388928849" Feb 02 22:57:28 crc kubenswrapper[4755]: I0202 22:57:28.807969 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:57:28 crc kubenswrapper[4755]: I0202 22:57:28.808451 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="af709f14-439f-4a0f-bf46-c23ae0483426" containerName="sg-core" containerID="cri-o://f258a723b994f60acb6fc7606a22cc043433c1abfe1e641cd4f7a94a602cce56" gracePeriod=30 Feb 02 22:57:28 crc kubenswrapper[4755]: I0202 22:57:28.808485 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="af709f14-439f-4a0f-bf46-c23ae0483426" containerName="proxy-httpd" containerID="cri-o://5664dc13cc5f441046a203ad41fd688672e983afdd2042b8043d046d9d9abf15" gracePeriod=30 Feb 02 22:57:28 crc kubenswrapper[4755]: I0202 22:57:28.808517 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="af709f14-439f-4a0f-bf46-c23ae0483426" containerName="ceilometer-notification-agent" containerID="cri-o://351ebcf32aba0db6b5a9a1faf3ab935a1376b0172f3b72566677ef4542cdeefd" gracePeriod=30 Feb 02 22:57:28 crc kubenswrapper[4755]: I0202 22:57:28.808297 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" 
podUID="af709f14-439f-4a0f-bf46-c23ae0483426" containerName="ceilometer-central-agent" containerID="cri-o://2ab106f6d962009f9a6c1b02542975a4f2d108a6deaae2792f5a89bd950681d5" gracePeriod=30 Feb 02 22:57:29 crc kubenswrapper[4755]: I0202 22:57:29.693869 4755 generic.go:334] "Generic (PLEG): container finished" podID="af709f14-439f-4a0f-bf46-c23ae0483426" containerID="5664dc13cc5f441046a203ad41fd688672e983afdd2042b8043d046d9d9abf15" exitCode=0 Feb 02 22:57:29 crc kubenswrapper[4755]: I0202 22:57:29.694202 4755 generic.go:334] "Generic (PLEG): container finished" podID="af709f14-439f-4a0f-bf46-c23ae0483426" containerID="f258a723b994f60acb6fc7606a22cc043433c1abfe1e641cd4f7a94a602cce56" exitCode=2 Feb 02 22:57:29 crc kubenswrapper[4755]: I0202 22:57:29.694220 4755 generic.go:334] "Generic (PLEG): container finished" podID="af709f14-439f-4a0f-bf46-c23ae0483426" containerID="2ab106f6d962009f9a6c1b02542975a4f2d108a6deaae2792f5a89bd950681d5" exitCode=0 Feb 02 22:57:29 crc kubenswrapper[4755]: I0202 22:57:29.695290 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"af709f14-439f-4a0f-bf46-c23ae0483426","Type":"ContainerDied","Data":"5664dc13cc5f441046a203ad41fd688672e983afdd2042b8043d046d9d9abf15"} Feb 02 22:57:29 crc kubenswrapper[4755]: I0202 22:57:29.695332 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"af709f14-439f-4a0f-bf46-c23ae0483426","Type":"ContainerDied","Data":"f258a723b994f60acb6fc7606a22cc043433c1abfe1e641cd4f7a94a602cce56"} Feb 02 22:57:29 crc kubenswrapper[4755]: I0202 22:57:29.695346 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"af709f14-439f-4a0f-bf46-c23ae0483426","Type":"ContainerDied","Data":"2ab106f6d962009f9a6c1b02542975a4f2d108a6deaae2792f5a89bd950681d5"} Feb 02 22:57:29 crc kubenswrapper[4755]: I0202 22:57:29.716631 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.589612 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.668639 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-config-data\") pod \"af709f14-439f-4a0f-bf46-c23ae0483426\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.668774 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-sg-core-conf-yaml\") pod \"af709f14-439f-4a0f-bf46-c23ae0483426\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.668876 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qcgb7\" (UniqueName: \"kubernetes.io/projected/af709f14-439f-4a0f-bf46-c23ae0483426-kube-api-access-qcgb7\") pod \"af709f14-439f-4a0f-bf46-c23ae0483426\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.668933 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/af709f14-439f-4a0f-bf46-c23ae0483426-run-httpd\") pod \"af709f14-439f-4a0f-bf46-c23ae0483426\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.668959 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-ceilometer-tls-certs\") pod \"af709f14-439f-4a0f-bf46-c23ae0483426\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.669008 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-scripts\") pod \"af709f14-439f-4a0f-bf46-c23ae0483426\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.669047 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/af709f14-439f-4a0f-bf46-c23ae0483426-log-httpd\") pod \"af709f14-439f-4a0f-bf46-c23ae0483426\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.669078 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-combined-ca-bundle\") pod \"af709f14-439f-4a0f-bf46-c23ae0483426\" (UID: \"af709f14-439f-4a0f-bf46-c23ae0483426\") " Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.670150 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af709f14-439f-4a0f-bf46-c23ae0483426-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "af709f14-439f-4a0f-bf46-c23ae0483426" (UID: "af709f14-439f-4a0f-bf46-c23ae0483426"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.673361 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af709f14-439f-4a0f-bf46-c23ae0483426-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "af709f14-439f-4a0f-bf46-c23ae0483426" (UID: "af709f14-439f-4a0f-bf46-c23ae0483426"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.692317 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-scripts" (OuterVolumeSpecName: "scripts") pod "af709f14-439f-4a0f-bf46-c23ae0483426" (UID: "af709f14-439f-4a0f-bf46-c23ae0483426"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.712273 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af709f14-439f-4a0f-bf46-c23ae0483426-kube-api-access-qcgb7" (OuterVolumeSpecName: "kube-api-access-qcgb7") pod "af709f14-439f-4a0f-bf46-c23ae0483426" (UID: "af709f14-439f-4a0f-bf46-c23ae0483426"). InnerVolumeSpecName "kube-api-access-qcgb7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.761497 4755 generic.go:334] "Generic (PLEG): container finished" podID="af709f14-439f-4a0f-bf46-c23ae0483426" containerID="351ebcf32aba0db6b5a9a1faf3ab935a1376b0172f3b72566677ef4542cdeefd" exitCode=0 Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.761538 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"af709f14-439f-4a0f-bf46-c23ae0483426","Type":"ContainerDied","Data":"351ebcf32aba0db6b5a9a1faf3ab935a1376b0172f3b72566677ef4542cdeefd"} Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.761566 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"af709f14-439f-4a0f-bf46-c23ae0483426","Type":"ContainerDied","Data":"5a46ca6e96312583fb20fc9bd2c690b153fda0582b8717139ba355ea7ccdfac7"} Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.761582 4755 scope.go:117] "RemoveContainer" containerID="5664dc13cc5f441046a203ad41fd688672e983afdd2042b8043d046d9d9abf15" Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.761771 4755 util.go:48] "No ready sandbox for pod can be found. 
Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.773142 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qcgb7\" (UniqueName: \"kubernetes.io/projected/af709f14-439f-4a0f-bf46-c23ae0483426-kube-api-access-qcgb7\") on node \"crc\" DevicePath \"\""
Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.773330 4755 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/af709f14-439f-4a0f-bf46-c23ae0483426-run-httpd\") on node \"crc\" DevicePath \"\""
Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.773709 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-scripts\") on node \"crc\" DevicePath \"\""
Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.773786 4755 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/af709f14-439f-4a0f-bf46-c23ae0483426-log-httpd\") on node \"crc\" DevicePath \"\""
Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.790337 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "af709f14-439f-4a0f-bf46-c23ae0483426" (UID: "af709f14-439f-4a0f-bf46-c23ae0483426"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.829037 4755 scope.go:117] "RemoveContainer" containerID="f258a723b994f60acb6fc7606a22cc043433c1abfe1e641cd4f7a94a602cce56"
Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.853639 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "af709f14-439f-4a0f-bf46-c23ae0483426" (UID: "af709f14-439f-4a0f-bf46-c23ae0483426"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.861117 4755 scope.go:117] "RemoveContainer" containerID="351ebcf32aba0db6b5a9a1faf3ab935a1376b0172f3b72566677ef4542cdeefd"
Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.862139 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-config-data" (OuterVolumeSpecName: "config-data") pod "af709f14-439f-4a0f-bf46-c23ae0483426" (UID: "af709f14-439f-4a0f-bf46-c23ae0483426"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.869353 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "af709f14-439f-4a0f-bf46-c23ae0483426" (UID: "af709f14-439f-4a0f-bf46-c23ae0483426"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.875575 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.875599 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-config-data\") on node \"crc\" DevicePath \"\""
Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.875608 4755 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.875616 4755 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/af709f14-439f-4a0f-bf46-c23ae0483426-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\""
Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.880895 4755 scope.go:117] "RemoveContainer" containerID="2ab106f6d962009f9a6c1b02542975a4f2d108a6deaae2792f5a89bd950681d5"
Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.903418 4755 scope.go:117] "RemoveContainer" containerID="5664dc13cc5f441046a203ad41fd688672e983afdd2042b8043d046d9d9abf15"
Feb 02 22:57:30 crc kubenswrapper[4755]: E0202 22:57:30.912022 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5664dc13cc5f441046a203ad41fd688672e983afdd2042b8043d046d9d9abf15\": container with ID starting with 5664dc13cc5f441046a203ad41fd688672e983afdd2042b8043d046d9d9abf15 not found: ID does not exist" containerID="5664dc13cc5f441046a203ad41fd688672e983afdd2042b8043d046d9d9abf15"
Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.912073 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5664dc13cc5f441046a203ad41fd688672e983afdd2042b8043d046d9d9abf15"} err="failed to get container status \"5664dc13cc5f441046a203ad41fd688672e983afdd2042b8043d046d9d9abf15\": rpc error: code = NotFound desc = could not find container \"5664dc13cc5f441046a203ad41fd688672e983afdd2042b8043d046d9d9abf15\": container with ID starting with 5664dc13cc5f441046a203ad41fd688672e983afdd2042b8043d046d9d9abf15 not found: ID does not exist"
Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.912109 4755 scope.go:117] "RemoveContainer" containerID="f258a723b994f60acb6fc7606a22cc043433c1abfe1e641cd4f7a94a602cce56"
Feb 02 22:57:30 crc kubenswrapper[4755]: E0202 22:57:30.912780 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f258a723b994f60acb6fc7606a22cc043433c1abfe1e641cd4f7a94a602cce56\": container with ID starting with f258a723b994f60acb6fc7606a22cc043433c1abfe1e641cd4f7a94a602cce56 not found: ID does not exist" containerID="f258a723b994f60acb6fc7606a22cc043433c1abfe1e641cd4f7a94a602cce56"
Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.912832 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f258a723b994f60acb6fc7606a22cc043433c1abfe1e641cd4f7a94a602cce56"} err="failed to get container status \"f258a723b994f60acb6fc7606a22cc043433c1abfe1e641cd4f7a94a602cce56\": rpc error: code = NotFound desc = could not find container \"f258a723b994f60acb6fc7606a22cc043433c1abfe1e641cd4f7a94a602cce56\": container with ID starting with f258a723b994f60acb6fc7606a22cc043433c1abfe1e641cd4f7a94a602cce56 not found: ID does not exist"
Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.912865 4755 scope.go:117] "RemoveContainer" containerID="351ebcf32aba0db6b5a9a1faf3ab935a1376b0172f3b72566677ef4542cdeefd"
Feb 02 22:57:30 crc kubenswrapper[4755]: E0202 22:57:30.913932 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"351ebcf32aba0db6b5a9a1faf3ab935a1376b0172f3b72566677ef4542cdeefd\": container with ID starting with 351ebcf32aba0db6b5a9a1faf3ab935a1376b0172f3b72566677ef4542cdeefd not found: ID does not exist" containerID="351ebcf32aba0db6b5a9a1faf3ab935a1376b0172f3b72566677ef4542cdeefd"
Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.913955 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"351ebcf32aba0db6b5a9a1faf3ab935a1376b0172f3b72566677ef4542cdeefd"} err="failed to get container status \"351ebcf32aba0db6b5a9a1faf3ab935a1376b0172f3b72566677ef4542cdeefd\": rpc error: code = NotFound desc = could not find container \"351ebcf32aba0db6b5a9a1faf3ab935a1376b0172f3b72566677ef4542cdeefd\": container with ID starting with 351ebcf32aba0db6b5a9a1faf3ab935a1376b0172f3b72566677ef4542cdeefd not found: ID does not exist"
Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.913969 4755 scope.go:117] "RemoveContainer" containerID="2ab106f6d962009f9a6c1b02542975a4f2d108a6deaae2792f5a89bd950681d5"
Feb 02 22:57:30 crc kubenswrapper[4755]: E0202 22:57:30.914247 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ab106f6d962009f9a6c1b02542975a4f2d108a6deaae2792f5a89bd950681d5\": container with ID starting with 2ab106f6d962009f9a6c1b02542975a4f2d108a6deaae2792f5a89bd950681d5 not found: ID does not exist" containerID="2ab106f6d962009f9a6c1b02542975a4f2d108a6deaae2792f5a89bd950681d5"
Feb 02 22:57:30 crc kubenswrapper[4755]: I0202 22:57:30.914284 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ab106f6d962009f9a6c1b02542975a4f2d108a6deaae2792f5a89bd950681d5"} err="failed to get container status \"2ab106f6d962009f9a6c1b02542975a4f2d108a6deaae2792f5a89bd950681d5\": rpc error: code = NotFound desc = could not find container \"2ab106f6d962009f9a6c1b02542975a4f2d108a6deaae2792f5a89bd950681d5\": container with ID starting with 2ab106f6d962009f9a6c1b02542975a4f2d108a6deaae2792f5a89bd950681d5 not found: ID does not exist"
Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.104243 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.117649 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.127360 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Feb 02 22:57:31 crc kubenswrapper[4755]: E0202 22:57:31.127827 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af709f14-439f-4a0f-bf46-c23ae0483426" containerName="ceilometer-central-agent"
Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.127848 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="af709f14-439f-4a0f-bf46-c23ae0483426" containerName="ceilometer-central-agent"
containerName="ceilometer-central-agent" Feb 02 22:57:31 crc kubenswrapper[4755]: E0202 22:57:31.127878 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af709f14-439f-4a0f-bf46-c23ae0483426" containerName="ceilometer-notification-agent" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.127886 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="af709f14-439f-4a0f-bf46-c23ae0483426" containerName="ceilometer-notification-agent" Feb 02 22:57:31 crc kubenswrapper[4755]: E0202 22:57:31.127905 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af709f14-439f-4a0f-bf46-c23ae0483426" containerName="proxy-httpd" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.127912 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="af709f14-439f-4a0f-bf46-c23ae0483426" containerName="proxy-httpd" Feb 02 22:57:31 crc kubenswrapper[4755]: E0202 22:57:31.127928 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af709f14-439f-4a0f-bf46-c23ae0483426" containerName="sg-core" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.127935 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="af709f14-439f-4a0f-bf46-c23ae0483426" containerName="sg-core" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.128109 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="af709f14-439f-4a0f-bf46-c23ae0483426" containerName="sg-core" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.128125 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="af709f14-439f-4a0f-bf46-c23ae0483426" containerName="proxy-httpd" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.128132 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="af709f14-439f-4a0f-bf46-c23ae0483426" containerName="ceilometer-central-agent" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.128156 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="af709f14-439f-4a0f-bf46-c23ae0483426" containerName="ceilometer-notification-agent" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.131304 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.133904 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.134094 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.142305 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.148483 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.283151 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-log-httpd\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.283258 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-run-httpd\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.283320 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-scripts\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.283350 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.283373 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zp68\" (UniqueName: \"kubernetes.io/projected/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-kube-api-access-6zp68\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.283403 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-config-data\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.283422 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.283446 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.385187 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.385450 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zp68\" (UniqueName: \"kubernetes.io/projected/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-kube-api-access-6zp68\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.385491 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-config-data\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.385520 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.385557 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.385583 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-log-httpd\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.385654 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-run-httpd\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.385710 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-scripts\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.386235 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-run-httpd\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.386321 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-log-httpd\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.390120 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.390224 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.390655 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-config-data\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.392764 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-scripts\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.392988 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.406485 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zp68\" (UniqueName: \"kubernetes.io/projected/3f2efd24-009b-4ad3-a07f-7d1d583e4bee-kube-api-access-6zp68\") pod \"ceilometer-0\" (UID: \"3f2efd24-009b-4ad3-a07f-7d1d583e4bee\") " pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.453027 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.782654 4755 generic.go:334] "Generic (PLEG): container finished" podID="6d72a2c1-c4d9-4f54-8ca1-aa44762ae943" containerID="e35c3b7030be3217cbbed55d1a767c2e67e274f40a17e5f96ddbf68bbf4cce20" exitCode=0 Feb 02 22:57:31 crc kubenswrapper[4755]: I0202 22:57:31.782749 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rfqj4" event={"ID":"6d72a2c1-c4d9-4f54-8ca1-aa44762ae943","Type":"ContainerDied","Data":"e35c3b7030be3217cbbed55d1a767c2e67e274f40a17e5f96ddbf68bbf4cce20"} Feb 02 22:57:32 crc kubenswrapper[4755]: I0202 22:57:32.012835 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 02 22:57:32 crc kubenswrapper[4755]: I0202 22:57:32.796120 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rfqj4" event={"ID":"6d72a2c1-c4d9-4f54-8ca1-aa44762ae943","Type":"ContainerStarted","Data":"2bb8b68404d5c3466ac8c171cf474f3e141b696cd6f53d4190c9ef841f5b8cbb"} Feb 02 22:57:32 crc kubenswrapper[4755]: I0202 22:57:32.799758 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-ltkhr" event={"ID":"909d96ec-6e4a-4012-b7bf-bb0f95815443","Type":"ContainerDied","Data":"d06a9fe7a85e3048a48fdd4742e37222e3b98e2591e4985cc68572ab8522c6a8"} Feb 02 22:57:32 crc kubenswrapper[4755]: I0202 22:57:32.799362 4755 generic.go:334] "Generic (PLEG): container finished" podID="909d96ec-6e4a-4012-b7bf-bb0f95815443" containerID="d06a9fe7a85e3048a48fdd4742e37222e3b98e2591e4985cc68572ab8522c6a8" exitCode=0 Feb 02 22:57:32 crc kubenswrapper[4755]: I0202 22:57:32.802212 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f2efd24-009b-4ad3-a07f-7d1d583e4bee","Type":"ContainerStarted","Data":"97eb06ad88e35d93a0cc7fe92dd68a165f8a6be87b39eac9c293b83fa8d7325b"} Feb 02 22:57:32 crc kubenswrapper[4755]: I0202 22:57:32.819361 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rfqj4" podStartSLOduration=2.120437915 podStartE2EDuration="9.81931733s" podCreationTimestamp="2026-02-02 22:57:23 +0000 UTC" firstStartedPulling="2026-02-02 22:57:24.625962175 +0000 UTC m=+1400.317182511" lastFinishedPulling="2026-02-02 22:57:32.3248416 +0000 UTC m=+1408.016061926" observedRunningTime="2026-02-02 22:57:32.812019215 +0000 UTC m=+1408.503239541" watchObservedRunningTime="2026-02-02 22:57:32.81931733 +0000 UTC m=+1408.510537656" Feb 02 22:57:33 crc kubenswrapper[4755]: I0202 22:57:33.084492 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af709f14-439f-4a0f-bf46-c23ae0483426" path="/var/lib/kubelet/pods/af709f14-439f-4a0f-bf46-c23ae0483426/volumes" Feb 02 22:57:33 crc kubenswrapper[4755]: I0202 22:57:33.405323 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rfqj4" Feb 02 22:57:33 crc kubenswrapper[4755]: I0202 22:57:33.405414 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rfqj4" Feb 02 22:57:33 crc kubenswrapper[4755]: I0202 22:57:33.826042 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="58b4faf6-d651-4094-b0bd-857e9074d9a9" containerName="rabbitmq" containerID="cri-o://002b1f763e47804c8b4feb74019e92a1de322df920284ef5d854c53f898d3393" 
gracePeriod=604795 Feb 02 22:57:34 crc kubenswrapper[4755]: I0202 22:57:34.437360 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="358a528d-56dd-4737-af2c-750423bbdc56" containerName="rabbitmq" containerID="cri-o://fd778f826ddd9aa8888475dc8bfa3d0d58355f61057d10f8903274d71add7300" gracePeriod=604796 Feb 02 22:57:34 crc kubenswrapper[4755]: I0202 22:57:34.457094 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rfqj4" podUID="6d72a2c1-c4d9-4f54-8ca1-aa44762ae943" containerName="registry-server" probeResult="failure" output=< Feb 02 22:57:34 crc kubenswrapper[4755]: timeout: failed to connect service ":50051" within 1s Feb 02 22:57:34 crc kubenswrapper[4755]: > Feb 02 22:57:35 crc kubenswrapper[4755]: I0202 22:57:35.580745 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-ltkhr" Feb 02 22:57:35 crc kubenswrapper[4755]: I0202 22:57:35.674167 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkt7h\" (UniqueName: \"kubernetes.io/projected/909d96ec-6e4a-4012-b7bf-bb0f95815443-kube-api-access-lkt7h\") pod \"909d96ec-6e4a-4012-b7bf-bb0f95815443\" (UID: \"909d96ec-6e4a-4012-b7bf-bb0f95815443\") " Feb 02 22:57:35 crc kubenswrapper[4755]: I0202 22:57:35.674263 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/909d96ec-6e4a-4012-b7bf-bb0f95815443-certs\") pod \"909d96ec-6e4a-4012-b7bf-bb0f95815443\" (UID: \"909d96ec-6e4a-4012-b7bf-bb0f95815443\") " Feb 02 22:57:35 crc kubenswrapper[4755]: I0202 22:57:35.674455 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/909d96ec-6e4a-4012-b7bf-bb0f95815443-config-data\") pod \"909d96ec-6e4a-4012-b7bf-bb0f95815443\" (UID: \"909d96ec-6e4a-4012-b7bf-bb0f95815443\") " Feb 02 22:57:35 crc kubenswrapper[4755]: I0202 22:57:35.674527 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/909d96ec-6e4a-4012-b7bf-bb0f95815443-scripts\") pod \"909d96ec-6e4a-4012-b7bf-bb0f95815443\" (UID: \"909d96ec-6e4a-4012-b7bf-bb0f95815443\") " Feb 02 22:57:35 crc kubenswrapper[4755]: I0202 22:57:35.674556 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/909d96ec-6e4a-4012-b7bf-bb0f95815443-combined-ca-bundle\") pod \"909d96ec-6e4a-4012-b7bf-bb0f95815443\" (UID: \"909d96ec-6e4a-4012-b7bf-bb0f95815443\") " Feb 02 22:57:35 crc kubenswrapper[4755]: I0202 22:57:35.683243 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/909d96ec-6e4a-4012-b7bf-bb0f95815443-certs" (OuterVolumeSpecName: "certs") pod "909d96ec-6e4a-4012-b7bf-bb0f95815443" (UID: "909d96ec-6e4a-4012-b7bf-bb0f95815443"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:57:35 crc kubenswrapper[4755]: I0202 22:57:35.686974 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/909d96ec-6e4a-4012-b7bf-bb0f95815443-kube-api-access-lkt7h" (OuterVolumeSpecName: "kube-api-access-lkt7h") pod "909d96ec-6e4a-4012-b7bf-bb0f95815443" (UID: "909d96ec-6e4a-4012-b7bf-bb0f95815443"). InnerVolumeSpecName "kube-api-access-lkt7h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:57:35 crc kubenswrapper[4755]: I0202 22:57:35.687216 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/909d96ec-6e4a-4012-b7bf-bb0f95815443-scripts" (OuterVolumeSpecName: "scripts") pod "909d96ec-6e4a-4012-b7bf-bb0f95815443" (UID: "909d96ec-6e4a-4012-b7bf-bb0f95815443"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:57:35 crc kubenswrapper[4755]: I0202 22:57:35.738092 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/909d96ec-6e4a-4012-b7bf-bb0f95815443-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "909d96ec-6e4a-4012-b7bf-bb0f95815443" (UID: "909d96ec-6e4a-4012-b7bf-bb0f95815443"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:57:35 crc kubenswrapper[4755]: I0202 22:57:35.741132 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/909d96ec-6e4a-4012-b7bf-bb0f95815443-config-data" (OuterVolumeSpecName: "config-data") pod "909d96ec-6e4a-4012-b7bf-bb0f95815443" (UID: "909d96ec-6e4a-4012-b7bf-bb0f95815443"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:57:35 crc kubenswrapper[4755]: I0202 22:57:35.776328 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/909d96ec-6e4a-4012-b7bf-bb0f95815443-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:35 crc kubenswrapper[4755]: I0202 22:57:35.776356 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/909d96ec-6e4a-4012-b7bf-bb0f95815443-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:35 crc kubenswrapper[4755]: I0202 22:57:35.776366 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/909d96ec-6e4a-4012-b7bf-bb0f95815443-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:35 crc kubenswrapper[4755]: I0202 22:57:35.776375 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lkt7h\" (UniqueName: \"kubernetes.io/projected/909d96ec-6e4a-4012-b7bf-bb0f95815443-kube-api-access-lkt7h\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:35 crc kubenswrapper[4755]: I0202 22:57:35.776384 4755 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/909d96ec-6e4a-4012-b7bf-bb0f95815443-certs\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:35 crc kubenswrapper[4755]: I0202 22:57:35.836494 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-ltkhr" event={"ID":"909d96ec-6e4a-4012-b7bf-bb0f95815443","Type":"ContainerDied","Data":"3bb2c152c5444ce32572e37c6124452308d4e1e78c1ba4a8d75c0a6ee842adba"} Feb 02 22:57:35 crc kubenswrapper[4755]: I0202 22:57:35.836534 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3bb2c152c5444ce32572e37c6124452308d4e1e78c1ba4a8d75c0a6ee842adba" Feb 02 22:57:35 crc kubenswrapper[4755]: I0202 22:57:35.836537 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-sync-ltkhr" Feb 02 22:57:36 crc kubenswrapper[4755]: I0202 22:57:36.687402 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-storageinit-pnrv4"] Feb 02 22:57:36 crc kubenswrapper[4755]: I0202 22:57:36.698477 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-storageinit-pnrv4"] Feb 02 22:57:36 crc kubenswrapper[4755]: I0202 22:57:36.789864 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-storageinit-6nq66"] Feb 02 22:57:36 crc kubenswrapper[4755]: E0202 22:57:36.790394 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="909d96ec-6e4a-4012-b7bf-bb0f95815443" containerName="cloudkitty-db-sync" Feb 02 22:57:36 crc kubenswrapper[4755]: I0202 22:57:36.790412 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="909d96ec-6e4a-4012-b7bf-bb0f95815443" containerName="cloudkitty-db-sync" Feb 02 22:57:36 crc kubenswrapper[4755]: I0202 22:57:36.790692 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="909d96ec-6e4a-4012-b7bf-bb0f95815443" containerName="cloudkitty-db-sync" Feb 02 22:57:36 crc kubenswrapper[4755]: I0202 22:57:36.791594 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-6nq66" Feb 02 22:57:36 crc kubenswrapper[4755]: I0202 22:57:36.795500 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 02 22:57:36 crc kubenswrapper[4755]: I0202 22:57:36.799967 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-6nq66"] Feb 02 22:57:36 crc kubenswrapper[4755]: I0202 22:57:36.855593 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f2efd24-009b-4ad3-a07f-7d1d583e4bee","Type":"ContainerStarted","Data":"d8893734dfaae5d2bf680b891c2a4f27107cb94592e556c600835dc21dcbb693"} Feb 02 22:57:36 crc kubenswrapper[4755]: I0202 22:57:36.903931 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-combined-ca-bundle\") pod \"cloudkitty-storageinit-6nq66\" (UID: \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\") " pod="openstack/cloudkitty-storageinit-6nq66" Feb 02 22:57:36 crc kubenswrapper[4755]: I0202 22:57:36.904041 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-scripts\") pod \"cloudkitty-storageinit-6nq66\" (UID: \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\") " pod="openstack/cloudkitty-storageinit-6nq66" Feb 02 22:57:36 crc kubenswrapper[4755]: I0202 22:57:36.904076 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-config-data\") pod \"cloudkitty-storageinit-6nq66\" (UID: \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\") " pod="openstack/cloudkitty-storageinit-6nq66" Feb 02 22:57:36 crc kubenswrapper[4755]: I0202 22:57:36.904168 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-certs\") pod \"cloudkitty-storageinit-6nq66\" (UID: \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\") " 
pod="openstack/cloudkitty-storageinit-6nq66" Feb 02 22:57:36 crc kubenswrapper[4755]: I0202 22:57:36.904200 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zndgh\" (UniqueName: \"kubernetes.io/projected/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-kube-api-access-zndgh\") pod \"cloudkitty-storageinit-6nq66\" (UID: \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\") " pod="openstack/cloudkitty-storageinit-6nq66" Feb 02 22:57:37 crc kubenswrapper[4755]: I0202 22:57:37.005875 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-combined-ca-bundle\") pod \"cloudkitty-storageinit-6nq66\" (UID: \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\") " pod="openstack/cloudkitty-storageinit-6nq66" Feb 02 22:57:37 crc kubenswrapper[4755]: I0202 22:57:37.006270 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-scripts\") pod \"cloudkitty-storageinit-6nq66\" (UID: \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\") " pod="openstack/cloudkitty-storageinit-6nq66" Feb 02 22:57:37 crc kubenswrapper[4755]: I0202 22:57:37.006308 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-config-data\") pod \"cloudkitty-storageinit-6nq66\" (UID: \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\") " pod="openstack/cloudkitty-storageinit-6nq66" Feb 02 22:57:37 crc kubenswrapper[4755]: I0202 22:57:37.006365 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-certs\") pod \"cloudkitty-storageinit-6nq66\" (UID: \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\") " pod="openstack/cloudkitty-storageinit-6nq66" Feb 02 22:57:37 crc kubenswrapper[4755]: I0202 22:57:37.006396 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zndgh\" (UniqueName: \"kubernetes.io/projected/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-kube-api-access-zndgh\") pod \"cloudkitty-storageinit-6nq66\" (UID: \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\") " pod="openstack/cloudkitty-storageinit-6nq66" Feb 02 22:57:37 crc kubenswrapper[4755]: I0202 22:57:37.012310 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-combined-ca-bundle\") pod \"cloudkitty-storageinit-6nq66\" (UID: \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\") " pod="openstack/cloudkitty-storageinit-6nq66" Feb 02 22:57:37 crc kubenswrapper[4755]: I0202 22:57:37.012543 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-config-data\") pod \"cloudkitty-storageinit-6nq66\" (UID: \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\") " pod="openstack/cloudkitty-storageinit-6nq66" Feb 02 22:57:37 crc kubenswrapper[4755]: I0202 22:57:37.015449 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-certs\") pod \"cloudkitty-storageinit-6nq66\" (UID: \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\") " pod="openstack/cloudkitty-storageinit-6nq66" Feb 02 22:57:37 crc kubenswrapper[4755]: I0202 
Feb 02 22:57:37 crc kubenswrapper[4755]: I0202 22:57:37.027414 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zndgh\" (UniqueName: \"kubernetes.io/projected/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-kube-api-access-zndgh\") pod \"cloudkitty-storageinit-6nq66\" (UID: \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\") " pod="openstack/cloudkitty-storageinit-6nq66"
Feb 02 22:57:37 crc kubenswrapper[4755]: I0202 22:57:37.081988 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43476281-d17e-4f36-8934-215e34e77ac6" path="/var/lib/kubelet/pods/43476281-d17e-4f36-8934-215e34e77ac6/volumes"
Feb 02 22:57:37 crc kubenswrapper[4755]: I0202 22:57:37.157225 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-6nq66"
Feb 02 22:57:37 crc kubenswrapper[4755]: I0202 22:57:37.501195 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="58b4faf6-d651-4094-b0bd-857e9074d9a9" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.107:5671: connect: connection refused"
Feb 02 22:57:37 crc kubenswrapper[4755]: I0202 22:57:37.633659 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-6nq66"]
Feb 02 22:57:37 crc kubenswrapper[4755]: W0202 22:57:37.642685 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddadba444_a0a5_4b6f_8d3b_9c2ed25f1261.slice/crio-701675e63f6cb487722ed6740c3c4d3a11bc94b06fcc1af6de21bcecf501a7cb WatchSource:0}: Error finding container 701675e63f6cb487722ed6740c3c4d3a11bc94b06fcc1af6de21bcecf501a7cb: Status 404 returned error can't find the container with id 701675e63f6cb487722ed6740c3c4d3a11bc94b06fcc1af6de21bcecf501a7cb
Feb 02 22:57:37 crc kubenswrapper[4755]: I0202 22:57:37.865267 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="358a528d-56dd-4737-af2c-750423bbdc56" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.108:5671: connect: connection refused"
Feb 02 22:57:37 crc kubenswrapper[4755]: I0202 22:57:37.868291 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f2efd24-009b-4ad3-a07f-7d1d583e4bee","Type":"ContainerStarted","Data":"438542439bcf40637977c83f9bafcff240ec49f07a9f9233e1dc882e9112968a"}
Feb 02 22:57:37 crc kubenswrapper[4755]: I0202 22:57:37.869829 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-6nq66" event={"ID":"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261","Type":"ContainerStarted","Data":"701675e63f6cb487722ed6740c3c4d3a11bc94b06fcc1af6de21bcecf501a7cb"}
Feb 02 22:57:38 crc kubenswrapper[4755]: I0202 22:57:38.887455 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f2efd24-009b-4ad3-a07f-7d1d583e4bee","Type":"ContainerStarted","Data":"5d870cca62d44768edc277d988890c9cd25676637a5354524355246c3c559b22"}
Feb 02 22:57:38 crc kubenswrapper[4755]: I0202 22:57:38.889981 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-6nq66" event={"ID":"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261","Type":"ContainerStarted","Data":"dde1a5ceead556147f68a30fad2b43052ba2245d8afb407b9a6768c5410426d7"}
Feb 02 22:57:38 crc kubenswrapper[4755]: I0202 22:57:38.928478 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-storageinit-6nq66" podStartSLOduration=2.92845319 podStartE2EDuration="2.92845319s" podCreationTimestamp="2026-02-02 22:57:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:57:38.917552524 +0000 UTC m=+1414.608772860" watchObservedRunningTime="2026-02-02 22:57:38.92845319 +0000 UTC m=+1414.619673536"
Feb 02 22:57:39 crc kubenswrapper[4755]: I0202 22:57:39.908808 4755 generic.go:334] "Generic (PLEG): container finished" podID="dadba444-a0a5-4b6f-8d3b-9c2ed25f1261" containerID="dde1a5ceead556147f68a30fad2b43052ba2245d8afb407b9a6768c5410426d7" exitCode=0
Feb 02 22:57:39 crc kubenswrapper[4755]: I0202 22:57:39.908983 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-6nq66" event={"ID":"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261","Type":"ContainerDied","Data":"dde1a5ceead556147f68a30fad2b43052ba2245d8afb407b9a6768c5410426d7"}
Feb 02 22:57:40 crc kubenswrapper[4755]: E0202 22:57:40.250575 4755 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod58b4faf6_d651_4094_b0bd_857e9074d9a9.slice/crio-002b1f763e47804c8b4feb74019e92a1de322df920284ef5d854c53f898d3393.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod58b4faf6_d651_4094_b0bd_857e9074d9a9.slice/crio-conmon-002b1f763e47804c8b4feb74019e92a1de322df920284ef5d854c53f898d3393.scope\": RecentStats: unable to find data in memory cache]"
Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.519013 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.692092 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-confd\") pod \"58b4faf6-d651-4094-b0bd-857e9074d9a9\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") "
Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.692194 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/58b4faf6-d651-4094-b0bd-857e9074d9a9-pod-info\") pod \"58b4faf6-d651-4094-b0bd-857e9074d9a9\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") "
Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.692255 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-erlang-cookie\") pod \"58b4faf6-d651-4094-b0bd-857e9074d9a9\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") "
Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.692285 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/58b4faf6-d651-4094-b0bd-857e9074d9a9-plugins-conf\") pod \"58b4faf6-d651-4094-b0bd-857e9074d9a9\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") "
Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.692329 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/58b4faf6-d651-4094-b0bd-857e9074d9a9-config-data\") pod \"58b4faf6-d651-4094-b0bd-857e9074d9a9\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") "
Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.692359 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/58b4faf6-d651-4094-b0bd-857e9074d9a9-erlang-cookie-secret\") pod \"58b4faf6-d651-4094-b0bd-857e9074d9a9\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") "
Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.692551 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/58b4faf6-d651-4094-b0bd-857e9074d9a9-server-conf\") pod \"58b4faf6-d651-4094-b0bd-857e9074d9a9\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") "
Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.692620 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-tls\") pod \"58b4faf6-d651-4094-b0bd-857e9074d9a9\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") "
Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.693464 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "58b4faf6-d651-4094-b0bd-857e9074d9a9" (UID: "58b4faf6-d651-4094-b0bd-857e9074d9a9"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.693706 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998\") pod \"58b4faf6-d651-4094-b0bd-857e9074d9a9\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.693776 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-plugins\") pod \"58b4faf6-d651-4094-b0bd-857e9074d9a9\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.693858 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwprw\" (UniqueName: \"kubernetes.io/projected/58b4faf6-d651-4094-b0bd-857e9074d9a9-kube-api-access-dwprw\") pod \"58b4faf6-d651-4094-b0bd-857e9074d9a9\" (UID: \"58b4faf6-d651-4094-b0bd-857e9074d9a9\") " Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.694347 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58b4faf6-d651-4094-b0bd-857e9074d9a9-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "58b4faf6-d651-4094-b0bd-857e9074d9a9" (UID: "58b4faf6-d651-4094-b0bd-857e9074d9a9"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.694601 4755 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.694615 4755 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/58b4faf6-d651-4094-b0bd-857e9074d9a9-plugins-conf\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.695930 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "58b4faf6-d651-4094-b0bd-857e9074d9a9" (UID: "58b4faf6-d651-4094-b0bd-857e9074d9a9"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.698333 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/58b4faf6-d651-4094-b0bd-857e9074d9a9-pod-info" (OuterVolumeSpecName: "pod-info") pod "58b4faf6-d651-4094-b0bd-857e9074d9a9" (UID: "58b4faf6-d651-4094-b0bd-857e9074d9a9"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.702712 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58b4faf6-d651-4094-b0bd-857e9074d9a9-kube-api-access-dwprw" (OuterVolumeSpecName: "kube-api-access-dwprw") pod "58b4faf6-d651-4094-b0bd-857e9074d9a9" (UID: "58b4faf6-d651-4094-b0bd-857e9074d9a9"). InnerVolumeSpecName "kube-api-access-dwprw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.703225 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "58b4faf6-d651-4094-b0bd-857e9074d9a9" (UID: "58b4faf6-d651-4094-b0bd-857e9074d9a9"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.708972 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58b4faf6-d651-4094-b0bd-857e9074d9a9-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "58b4faf6-d651-4094-b0bd-857e9074d9a9" (UID: "58b4faf6-d651-4094-b0bd-857e9074d9a9"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.735499 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998" (OuterVolumeSpecName: "persistence") pod "58b4faf6-d651-4094-b0bd-857e9074d9a9" (UID: "58b4faf6-d651-4094-b0bd-857e9074d9a9"). InnerVolumeSpecName "pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998". PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.772460 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58b4faf6-d651-4094-b0bd-857e9074d9a9-config-data" (OuterVolumeSpecName: "config-data") pod "58b4faf6-d651-4094-b0bd-857e9074d9a9" (UID: "58b4faf6-d651-4094-b0bd-857e9074d9a9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.799405 4755 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.799473 4755 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998\") on node \"crc\" " Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.799490 4755 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.799501 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwprw\" (UniqueName: \"kubernetes.io/projected/58b4faf6-d651-4094-b0bd-857e9074d9a9-kube-api-access-dwprw\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.799511 4755 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/58b4faf6-d651-4094-b0bd-857e9074d9a9-pod-info\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.799518 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/58b4faf6-d651-4094-b0bd-857e9074d9a9-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.799526 4755 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/58b4faf6-d651-4094-b0bd-857e9074d9a9-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.829085 4755 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.829257 4755 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998") on node "crc" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.834414 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58b4faf6-d651-4094-b0bd-857e9074d9a9-server-conf" (OuterVolumeSpecName: "server-conf") pod "58b4faf6-d651-4094-b0bd-857e9074d9a9" (UID: "58b4faf6-d651-4094-b0bd-857e9074d9a9"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.902558 4755 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/58b4faf6-d651-4094-b0bd-857e9074d9a9-server-conf\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.902588 4755 reconciler_common.go:293] "Volume detached for volume \"pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.921564 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "58b4faf6-d651-4094-b0bd-857e9074d9a9" (UID: "58b4faf6-d651-4094-b0bd-857e9074d9a9"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.936840 4755 generic.go:334] "Generic (PLEG): container finished" podID="358a528d-56dd-4737-af2c-750423bbdc56" containerID="fd778f826ddd9aa8888475dc8bfa3d0d58355f61057d10f8903274d71add7300" exitCode=0 Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.936888 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"358a528d-56dd-4737-af2c-750423bbdc56","Type":"ContainerDied","Data":"fd778f826ddd9aa8888475dc8bfa3d0d58355f61057d10f8903274d71add7300"} Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.940382 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f2efd24-009b-4ad3-a07f-7d1d583e4bee","Type":"ContainerStarted","Data":"e5189a67ac8f9776a37f9cf99af81573fbfe6876f41c35ef0d087466deccc4e8"} Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.940507 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.946107 4755 generic.go:334] "Generic (PLEG): container finished" podID="58b4faf6-d651-4094-b0bd-857e9074d9a9" containerID="002b1f763e47804c8b4feb74019e92a1de322df920284ef5d854c53f898d3393" exitCode=0 Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.946524 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.947305 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"58b4faf6-d651-4094-b0bd-857e9074d9a9","Type":"ContainerDied","Data":"002b1f763e47804c8b4feb74019e92a1de322df920284ef5d854c53f898d3393"} Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.947442 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"58b4faf6-d651-4094-b0bd-857e9074d9a9","Type":"ContainerDied","Data":"453aebe606241b7fd42c613ff692d7e396c337778248432aca99de35ecfea722"} Feb 02 22:57:40 crc kubenswrapper[4755]: I0202 22:57:40.947462 4755 scope.go:117] "RemoveContainer" containerID="002b1f763e47804c8b4feb74019e92a1de322df920284ef5d854c53f898d3393" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.005849 4755 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/58b4faf6-d651-4094-b0bd-857e9074d9a9-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.026970 4755 scope.go:117] "RemoveContainer" containerID="6fb82bf5a8c394829ac2dcdcea105aa583faa34495f7dfb958a9981f5423ecdc" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.041530 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.195229061 podStartE2EDuration="10.041507806s" podCreationTimestamp="2026-02-02 22:57:31 +0000 UTC" firstStartedPulling="2026-02-02 22:57:32.002848876 +0000 UTC m=+1407.694069202" lastFinishedPulling="2026-02-02 22:57:39.849127621 +0000 UTC m=+1415.540347947" observedRunningTime="2026-02-02 22:57:40.973893957 +0000 UTC m=+1416.665114283" watchObservedRunningTime="2026-02-02 22:57:41.041507806 +0000 UTC m=+1416.732728132" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.048097 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.068177 4755 scope.go:117] "RemoveContainer" containerID="002b1f763e47804c8b4feb74019e92a1de322df920284ef5d854c53f898d3393" Feb 02 22:57:41 crc kubenswrapper[4755]: E0202 22:57:41.070028 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"002b1f763e47804c8b4feb74019e92a1de322df920284ef5d854c53f898d3393\": container with ID starting with 002b1f763e47804c8b4feb74019e92a1de322df920284ef5d854c53f898d3393 not found: ID does not exist" containerID="002b1f763e47804c8b4feb74019e92a1de322df920284ef5d854c53f898d3393" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.070085 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"002b1f763e47804c8b4feb74019e92a1de322df920284ef5d854c53f898d3393"} err="failed to get container status \"002b1f763e47804c8b4feb74019e92a1de322df920284ef5d854c53f898d3393\": rpc error: code = NotFound desc = could not find container \"002b1f763e47804c8b4feb74019e92a1de322df920284ef5d854c53f898d3393\": container with ID starting with 002b1f763e47804c8b4feb74019e92a1de322df920284ef5d854c53f898d3393 not found: ID does not exist" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.070116 4755 scope.go:117] "RemoveContainer" containerID="6fb82bf5a8c394829ac2dcdcea105aa583faa34495f7dfb958a9981f5423ecdc" Feb 02 22:57:41 crc kubenswrapper[4755]: E0202 22:57:41.070589 4755 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fb82bf5a8c394829ac2dcdcea105aa583faa34495f7dfb958a9981f5423ecdc\": container with ID starting with 6fb82bf5a8c394829ac2dcdcea105aa583faa34495f7dfb958a9981f5423ecdc not found: ID does not exist" containerID="6fb82bf5a8c394829ac2dcdcea105aa583faa34495f7dfb958a9981f5423ecdc" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.070639 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fb82bf5a8c394829ac2dcdcea105aa583faa34495f7dfb958a9981f5423ecdc"} err="failed to get container status \"6fb82bf5a8c394829ac2dcdcea105aa583faa34495f7dfb958a9981f5423ecdc\": rpc error: code = NotFound desc = could not find container \"6fb82bf5a8c394829ac2dcdcea105aa583faa34495f7dfb958a9981f5423ecdc\": container with ID starting with 6fb82bf5a8c394829ac2dcdcea105aa583faa34495f7dfb958a9981f5423ecdc not found: ID does not exist" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.085043 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.110796 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 22:57:41 crc kubenswrapper[4755]: E0202 22:57:41.111460 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58b4faf6-d651-4094-b0bd-857e9074d9a9" containerName="setup-container" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.111478 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="58b4faf6-d651-4094-b0bd-857e9074d9a9" containerName="setup-container" Feb 02 22:57:41 crc kubenswrapper[4755]: E0202 22:57:41.111511 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58b4faf6-d651-4094-b0bd-857e9074d9a9" containerName="rabbitmq" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.111542 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="58b4faf6-d651-4094-b0bd-857e9074d9a9" containerName="rabbitmq" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.111756 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="58b4faf6-d651-4094-b0bd-857e9074d9a9" containerName="rabbitmq" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.112884 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.115824 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-jlfgv" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.115984 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.116000 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.116093 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.116187 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.116214 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.116343 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.128864 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.166762 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.210981 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a4a28287-a8ee-439b-a1af-927b8819a6ae-config-data\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.211045 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a4a28287-a8ee-439b-a1af-927b8819a6ae-server-conf\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.211074 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a4a28287-a8ee-439b-a1af-927b8819a6ae-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.211147 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.211171 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a4a28287-a8ee-439b-a1af-927b8819a6ae-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc 
kubenswrapper[4755]: I0202 22:57:41.211249 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fq7rl\" (UniqueName: \"kubernetes.io/projected/a4a28287-a8ee-439b-a1af-927b8819a6ae-kube-api-access-fq7rl\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.211279 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a4a28287-a8ee-439b-a1af-927b8819a6ae-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.211352 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a4a28287-a8ee-439b-a1af-927b8819a6ae-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.211371 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a4a28287-a8ee-439b-a1af-927b8819a6ae-pod-info\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.211392 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a4a28287-a8ee-439b-a1af-927b8819a6ae-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.211413 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a4a28287-a8ee-439b-a1af-927b8819a6ae-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.312982 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/358a528d-56dd-4737-af2c-750423bbdc56-config-data\") pod \"358a528d-56dd-4737-af2c-750423bbdc56\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.313070 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-confd\") pod \"358a528d-56dd-4737-af2c-750423bbdc56\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.313093 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/358a528d-56dd-4737-af2c-750423bbdc56-pod-info\") pod \"358a528d-56dd-4737-af2c-750423bbdc56\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.313992 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7\") pod \"358a528d-56dd-4737-af2c-750423bbdc56\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.314038 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-tls\") pod \"358a528d-56dd-4737-af2c-750423bbdc56\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.314139 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-plugins\") pod \"358a528d-56dd-4737-af2c-750423bbdc56\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.314162 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/358a528d-56dd-4737-af2c-750423bbdc56-erlang-cookie-secret\") pod \"358a528d-56dd-4737-af2c-750423bbdc56\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.314203 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/358a528d-56dd-4737-af2c-750423bbdc56-plugins-conf\") pod \"358a528d-56dd-4737-af2c-750423bbdc56\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.314249 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-erlang-cookie\") pod \"358a528d-56dd-4737-af2c-750423bbdc56\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.314268 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/358a528d-56dd-4737-af2c-750423bbdc56-server-conf\") pod \"358a528d-56dd-4737-af2c-750423bbdc56\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.314318 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xbvf8\" (UniqueName: \"kubernetes.io/projected/358a528d-56dd-4737-af2c-750423bbdc56-kube-api-access-xbvf8\") pod \"358a528d-56dd-4737-af2c-750423bbdc56\" (UID: \"358a528d-56dd-4737-af2c-750423bbdc56\") " Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.314684 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fq7rl\" (UniqueName: \"kubernetes.io/projected/a4a28287-a8ee-439b-a1af-927b8819a6ae-kube-api-access-fq7rl\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.314723 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a4a28287-a8ee-439b-a1af-927b8819a6ae-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.314793 4755 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a4a28287-a8ee-439b-a1af-927b8819a6ae-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.314837 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a4a28287-a8ee-439b-a1af-927b8819a6ae-pod-info\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.314860 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a4a28287-a8ee-439b-a1af-927b8819a6ae-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.314883 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a4a28287-a8ee-439b-a1af-927b8819a6ae-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.314924 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a4a28287-a8ee-439b-a1af-927b8819a6ae-config-data\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.314951 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a4a28287-a8ee-439b-a1af-927b8819a6ae-server-conf\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.314975 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a4a28287-a8ee-439b-a1af-927b8819a6ae-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.315044 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.315068 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a4a28287-a8ee-439b-a1af-927b8819a6ae-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.315090 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod 
"358a528d-56dd-4737-af2c-750423bbdc56" (UID: "358a528d-56dd-4737-af2c-750423bbdc56"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.315472 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a4a28287-a8ee-439b-a1af-927b8819a6ae-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.317739 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/358a528d-56dd-4737-af2c-750423bbdc56-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "358a528d-56dd-4737-af2c-750423bbdc56" (UID: "358a528d-56dd-4737-af2c-750423bbdc56"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.320959 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/358a528d-56dd-4737-af2c-750423bbdc56-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "358a528d-56dd-4737-af2c-750423bbdc56" (UID: "358a528d-56dd-4737-af2c-750423bbdc56"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.321144 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a4a28287-a8ee-439b-a1af-927b8819a6ae-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.321333 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a4a28287-a8ee-439b-a1af-927b8819a6ae-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.321986 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "358a528d-56dd-4737-af2c-750423bbdc56" (UID: "358a528d-56dd-4737-af2c-750423bbdc56"). InnerVolumeSpecName "rabbitmq-erlang-cookie". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.323129 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a4a28287-a8ee-439b-a1af-927b8819a6ae-config-data\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.325575 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a4a28287-a8ee-439b-a1af-927b8819a6ae-pod-info\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.326433 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a4a28287-a8ee-439b-a1af-927b8819a6ae-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.326482 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a4a28287-a8ee-439b-a1af-927b8819a6ae-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.329439 4755 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.329492 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/dc3c1b15045b3f4e3ad98c980816f9ce7e3f9051073beec991ed4f7eea0a77f6/globalmount\"" pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.335032 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a4a28287-a8ee-439b-a1af-927b8819a6ae-server-conf\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.337266 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/358a528d-56dd-4737-af2c-750423bbdc56-pod-info" (OuterVolumeSpecName: "pod-info") pod "358a528d-56dd-4737-af2c-750423bbdc56" (UID: "358a528d-56dd-4737-af2c-750423bbdc56"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.337435 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "358a528d-56dd-4737-af2c-750423bbdc56" (UID: "358a528d-56dd-4737-af2c-750423bbdc56"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.337965 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/358a528d-56dd-4737-af2c-750423bbdc56-kube-api-access-xbvf8" (OuterVolumeSpecName: "kube-api-access-xbvf8") pod "358a528d-56dd-4737-af2c-750423bbdc56" (UID: "358a528d-56dd-4737-af2c-750423bbdc56"). InnerVolumeSpecName "kube-api-access-xbvf8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.346886 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a4a28287-a8ee-439b-a1af-927b8819a6ae-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.353977 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fq7rl\" (UniqueName: \"kubernetes.io/projected/a4a28287-a8ee-439b-a1af-927b8819a6ae-kube-api-access-fq7rl\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.363228 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7" (OuterVolumeSpecName: "persistence") pod "358a528d-56dd-4737-af2c-750423bbdc56" (UID: "358a528d-56dd-4737-af2c-750423bbdc56"). InnerVolumeSpecName "pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7". PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.374212 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/358a528d-56dd-4737-af2c-750423bbdc56-config-data" (OuterVolumeSpecName: "config-data") pod "358a528d-56dd-4737-af2c-750423bbdc56" (UID: "358a528d-56dd-4737-af2c-750423bbdc56"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.403470 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6c6ba7a0-0c69-480f-8565-6d5ef532d998\") pod \"rabbitmq-server-0\" (UID: \"a4a28287-a8ee-439b-a1af-927b8819a6ae\") " pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.420493 4755 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.420545 4755 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/358a528d-56dd-4737-af2c-750423bbdc56-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.420558 4755 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/358a528d-56dd-4737-af2c-750423bbdc56-plugins-conf\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.420574 4755 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.420586 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xbvf8\" (UniqueName: \"kubernetes.io/projected/358a528d-56dd-4737-af2c-750423bbdc56-kube-api-access-xbvf8\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.420595 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/358a528d-56dd-4737-af2c-750423bbdc56-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.420622 4755 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/358a528d-56dd-4737-af2c-750423bbdc56-pod-info\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.420654 4755 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7\") on node \"crc\" " Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.420827 4755 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.455781 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-6nq66" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.458510 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/358a528d-56dd-4737-af2c-750423bbdc56-server-conf" (OuterVolumeSpecName: "server-conf") pod "358a528d-56dd-4737-af2c-750423bbdc56" (UID: "358a528d-56dd-4737-af2c-750423bbdc56"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.462323 4755 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.462495 4755 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7") on node "crc" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.486168 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.521975 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-combined-ca-bundle\") pod \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\" (UID: \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\") " Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.522108 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-config-data\") pod \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\" (UID: \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\") " Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.522133 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-scripts\") pod \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\" (UID: \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\") " Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.522246 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-certs\") pod \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\" (UID: \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\") " Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.522303 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zndgh\" (UniqueName: \"kubernetes.io/projected/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-kube-api-access-zndgh\") pod \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\" (UID: \"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261\") " Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.523088 4755 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/358a528d-56dd-4737-af2c-750423bbdc56-server-conf\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.523119 4755 reconciler_common.go:293] "Volume detached for volume \"pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.525399 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-scripts" (OuterVolumeSpecName: "scripts") pod "dadba444-a0a5-4b6f-8d3b-9c2ed25f1261" (UID: "dadba444-a0a5-4b6f-8d3b-9c2ed25f1261"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.526459 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-certs" (OuterVolumeSpecName: "certs") pod "dadba444-a0a5-4b6f-8d3b-9c2ed25f1261" (UID: "dadba444-a0a5-4b6f-8d3b-9c2ed25f1261"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.526843 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-kube-api-access-zndgh" (OuterVolumeSpecName: "kube-api-access-zndgh") pod "dadba444-a0a5-4b6f-8d3b-9c2ed25f1261" (UID: "dadba444-a0a5-4b6f-8d3b-9c2ed25f1261"). InnerVolumeSpecName "kube-api-access-zndgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.530905 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "358a528d-56dd-4737-af2c-750423bbdc56" (UID: "358a528d-56dd-4737-af2c-750423bbdc56"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.555675 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dadba444-a0a5-4b6f-8d3b-9c2ed25f1261" (UID: "dadba444-a0a5-4b6f-8d3b-9c2ed25f1261"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.557691 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-config-data" (OuterVolumeSpecName: "config-data") pod "dadba444-a0a5-4b6f-8d3b-9c2ed25f1261" (UID: "dadba444-a0a5-4b6f-8d3b-9c2ed25f1261"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.626707 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.626995 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.627005 4755 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-certs\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.627014 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zndgh\" (UniqueName: \"kubernetes.io/projected/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-kube-api-access-zndgh\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.627023 4755 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/358a528d-56dd-4737-af2c-750423bbdc56-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.627050 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:41 crc kubenswrapper[4755]: W0202 22:57:41.954443 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4a28287_a8ee_439b_a1af_927b8819a6ae.slice/crio-8e6f20ee610e9e4353494501a64c72d6bc057fe2ebec3bbdfa1d8f8ccf747f4f WatchSource:0}: Error finding container 8e6f20ee610e9e4353494501a64c72d6bc057fe2ebec3bbdfa1d8f8ccf747f4f: Status 404 returned error can't find the container with id 8e6f20ee610e9e4353494501a64c72d6bc057fe2ebec3bbdfa1d8f8ccf747f4f Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.956536 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.966076 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"358a528d-56dd-4737-af2c-750423bbdc56","Type":"ContainerDied","Data":"584d341809efd0db4757315d09a4d27356e8ab74aba11130a6d3901d16df1c91"} Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.966132 4755 scope.go:117] "RemoveContainer" containerID="fd778f826ddd9aa8888475dc8bfa3d0d58355f61057d10f8903274d71add7300" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.966313 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.972780 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-6nq66" event={"ID":"dadba444-a0a5-4b6f-8d3b-9c2ed25f1261","Type":"ContainerDied","Data":"701675e63f6cb487722ed6740c3c4d3a11bc94b06fcc1af6de21bcecf501a7cb"} Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.972815 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="701675e63f6cb487722ed6740c3c4d3a11bc94b06fcc1af6de21bcecf501a7cb" Feb 02 22:57:41 crc kubenswrapper[4755]: I0202 22:57:41.972812 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-6nq66" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.055017 4755 scope.go:117] "RemoveContainer" containerID="031ee13fb5a9e9bc160f26ee98bb344b428a4611b475b3d538d6bc14c4c2261f" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.078096 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.093164 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.110149 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.110400 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-proc-0" podUID="1d3cae78-0c3c-404d-b13d-5595f3a10ddc" containerName="cloudkitty-proc" containerID="cri-o://d3f30a550f0e5b52af18a2bd4ad31fb954e7af66400659e87b406a0171614f4d" gracePeriod=30 Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.145320 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"] Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.150780 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="be9e900a-a2d8-4291-9817-fccd96e11436" containerName="cloudkitty-api-log" containerID="cri-o://3df824c680bb5f51ba1203758fdd4f8a065590dd78171682d79b9c0f10711741" gracePeriod=30 Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.151279 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="be9e900a-a2d8-4291-9817-fccd96e11436" containerName="cloudkitty-api" containerID="cri-o://557002d3e000ebf1df46e9340bc73a32fc977d2dc60819b8c346d0ed0d4b7386" gracePeriod=30 Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.179609 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 22:57:42 crc kubenswrapper[4755]: E0202 22:57:42.180155 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dadba444-a0a5-4b6f-8d3b-9c2ed25f1261" containerName="cloudkitty-storageinit" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.180174 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="dadba444-a0a5-4b6f-8d3b-9c2ed25f1261" containerName="cloudkitty-storageinit" Feb 02 22:57:42 crc kubenswrapper[4755]: E0202 22:57:42.180184 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="358a528d-56dd-4737-af2c-750423bbdc56" containerName="rabbitmq" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.180190 4755 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="358a528d-56dd-4737-af2c-750423bbdc56" containerName="rabbitmq" Feb 02 22:57:42 crc kubenswrapper[4755]: E0202 22:57:42.180218 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="358a528d-56dd-4737-af2c-750423bbdc56" containerName="setup-container" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.180225 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="358a528d-56dd-4737-af2c-750423bbdc56" containerName="setup-container" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.181008 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="dadba444-a0a5-4b6f-8d3b-9c2ed25f1261" containerName="cloudkitty-storageinit" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.181030 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="358a528d-56dd-4737-af2c-750423bbdc56" containerName="rabbitmq" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.182835 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.190691 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.190953 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.190842 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.193020 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.193232 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.193342 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.193554 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.195978 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-mr7lj" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.249979 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.250027 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.250079 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.250109 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.250130 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hln7r\" (UniqueName: \"kubernetes.io/projected/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-kube-api-access-hln7r\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.250236 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.250270 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.250299 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.250327 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.250391 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.250416 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.352216 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.352318 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.352346 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.352391 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hln7r\" (UniqueName: \"kubernetes.io/projected/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-kube-api-access-hln7r\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.352587 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.352663 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.352715 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.352775 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.352973 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.353121 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.353351 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.353383 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.353533 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.353614 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.354204 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.357086 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.357099 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.357263 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.358525 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 
22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.359315 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.359882 4755 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.359975 4755 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e6532bd52b9aa617ec4882185abeecf9678960789d6649da4c0d87ef9f673b0b/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.375892 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hln7r\" (UniqueName: \"kubernetes.io/projected/e09fc49d-8b5d-4775-a549-b5ca23d3d13e-kube-api-access-hln7r\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.411738 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25f996cf-5682-4c06-a9f5-a99e8be0e4e7\") pod \"rabbitmq-cell1-server-0\" (UID: \"e09fc49d-8b5d-4775-a549-b5ca23d3d13e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.543669 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.848791 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-dc7c944bf-f6zml"] Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.852441 4755 util.go:30] "No sandbox for pod can be found. 
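The VerifyControllerAttachedVolume/MountVolume pairs above walk rabbitmq-cell1-server-0's volume list (ConfigMaps, Secrets, projected volumes, a downward-API volume, and one CSI-backed PVC whose NodeStageVolume step is skipped because the hostpath provisioner does not advertise STAGE_UNSTAGE_VOLUME). A hedged client-go sketch for reading back that same volume list; the kubeconfig path is an assumption and error handling is minimal:

package main

import (
	"context"
	"fmt"
	"log"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // assumed path
	if err != nil {
		log.Fatal(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}
	pod, err := cs.CoreV1().Pods("openstack").Get(context.TODO(),
		"rabbitmq-cell1-server-0", metav1.GetOptions{})
	if err != nil {
		log.Fatal(err)
	}
	// Prints server-conf, erlang-cookie-secret, rabbitmq-plugins, ... as
	// mounted above; the PVC-backed volume shows its claim reference.
	for _, v := range pod.Spec.Volumes {
		fmt.Println(v.Name)
	}
}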
Need to start a new one" pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.854347 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.865995 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-dc7c944bf-f6zml"] Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.965858 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-dns-swift-storage-0\") pod \"dnsmasq-dns-dc7c944bf-f6zml\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.966128 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-dns-svc\") pod \"dnsmasq-dns-dc7c944bf-f6zml\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.966167 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q74jl\" (UniqueName: \"kubernetes.io/projected/08987794-2101-4138-8efe-915de10da5be-kube-api-access-q74jl\") pod \"dnsmasq-dns-dc7c944bf-f6zml\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.966191 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-openstack-edpm-ipam\") pod \"dnsmasq-dns-dc7c944bf-f6zml\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.966205 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-ovsdbserver-sb\") pod \"dnsmasq-dns-dc7c944bf-f6zml\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.966335 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-config\") pod \"dnsmasq-dns-dc7c944bf-f6zml\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.966353 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-ovsdbserver-nb\") pod \"dnsmasq-dns-dc7c944bf-f6zml\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.995402 4755 generic.go:334] "Generic (PLEG): container finished" podID="be9e900a-a2d8-4291-9817-fccd96e11436" containerID="3df824c680bb5f51ba1203758fdd4f8a065590dd78171682d79b9c0f10711741" exitCode=143 Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 
22:57:42.995485 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"be9e900a-a2d8-4291-9817-fccd96e11436","Type":"ContainerDied","Data":"3df824c680bb5f51ba1203758fdd4f8a065590dd78171682d79b9c0f10711741"} Feb 02 22:57:42 crc kubenswrapper[4755]: I0202 22:57:42.998650 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a4a28287-a8ee-439b-a1af-927b8819a6ae","Type":"ContainerStarted","Data":"8e6f20ee610e9e4353494501a64c72d6bc057fe2ebec3bbdfa1d8f8ccf747f4f"} Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.003142 4755 generic.go:334] "Generic (PLEG): container finished" podID="1d3cae78-0c3c-404d-b13d-5595f3a10ddc" containerID="d3f30a550f0e5b52af18a2bd4ad31fb954e7af66400659e87b406a0171614f4d" exitCode=0 Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.003235 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"1d3cae78-0c3c-404d-b13d-5595f3a10ddc","Type":"ContainerDied","Data":"d3f30a550f0e5b52af18a2bd4ad31fb954e7af66400659e87b406a0171614f4d"} Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.071624 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-config\") pod \"dnsmasq-dns-dc7c944bf-f6zml\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.071677 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-ovsdbserver-nb\") pod \"dnsmasq-dns-dc7c944bf-f6zml\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.071764 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-dns-swift-storage-0\") pod \"dnsmasq-dns-dc7c944bf-f6zml\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.071795 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-dns-svc\") pod \"dnsmasq-dns-dc7c944bf-f6zml\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.071855 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q74jl\" (UniqueName: \"kubernetes.io/projected/08987794-2101-4138-8efe-915de10da5be-kube-api-access-q74jl\") pod \"dnsmasq-dns-dc7c944bf-f6zml\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.071882 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-ovsdbserver-sb\") pod \"dnsmasq-dns-dc7c944bf-f6zml\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.071904 4755 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-openstack-edpm-ipam\") pod \"dnsmasq-dns-dc7c944bf-f6zml\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.072818 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-openstack-edpm-ipam\") pod \"dnsmasq-dns-dc7c944bf-f6zml\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.073327 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-config\") pod \"dnsmasq-dns-dc7c944bf-f6zml\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.074163 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-dns-svc\") pod \"dnsmasq-dns-dc7c944bf-f6zml\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.074175 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-ovsdbserver-sb\") pod \"dnsmasq-dns-dc7c944bf-f6zml\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.075170 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-ovsdbserver-nb\") pod \"dnsmasq-dns-dc7c944bf-f6zml\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.079265 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-dns-swift-storage-0\") pod \"dnsmasq-dns-dc7c944bf-f6zml\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.090982 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="358a528d-56dd-4737-af2c-750423bbdc56" path="/var/lib/kubelet/pods/358a528d-56dd-4737-af2c-750423bbdc56/volumes" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.094624 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58b4faf6-d651-4094-b0bd-857e9074d9a9" path="/var/lib/kubelet/pods/58b4faf6-d651-4094-b0bd-857e9074d9a9/volumes" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.097794 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.179240 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q74jl\" (UniqueName: \"kubernetes.io/projected/08987794-2101-4138-8efe-915de10da5be-kube-api-access-q74jl\") pod \"dnsmasq-dns-dc7c944bf-f6zml\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " 
pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:43 crc kubenswrapper[4755]: W0202 22:57:43.183429 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode09fc49d_8b5d_4775_a549_b5ca23d3d13e.slice/crio-a957c4d8ccaa1e59943713d35731cdbcdee8c350e5b5ea585454df575572adc9 WatchSource:0}: Error finding container a957c4d8ccaa1e59943713d35731cdbcdee8c350e5b5ea585454df575572adc9: Status 404 returned error can't find the container with id a957c4d8ccaa1e59943713d35731cdbcdee8c350e5b5ea585454df575572adc9 Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.240499 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.533251 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.587462 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-combined-ca-bundle\") pod \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.587561 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-config-data-custom\") pod \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.587617 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-scripts\") pod \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.587709 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-certs\") pod \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.587765 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-config-data\") pod \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.587784 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bfnfj\" (UniqueName: \"kubernetes.io/projected/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-kube-api-access-bfnfj\") pod \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\" (UID: \"1d3cae78-0c3c-404d-b13d-5595f3a10ddc\") " Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.679951 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-scripts" (OuterVolumeSpecName: "scripts") pod "1d3cae78-0c3c-404d-b13d-5595f3a10ddc" (UID: "1d3cae78-0c3c-404d-b13d-5595f3a10ddc"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.679976 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-kube-api-access-bfnfj" (OuterVolumeSpecName: "kube-api-access-bfnfj") pod "1d3cae78-0c3c-404d-b13d-5595f3a10ddc" (UID: "1d3cae78-0c3c-404d-b13d-5595f3a10ddc"). InnerVolumeSpecName "kube-api-access-bfnfj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.680974 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-certs" (OuterVolumeSpecName: "certs") pod "1d3cae78-0c3c-404d-b13d-5595f3a10ddc" (UID: "1d3cae78-0c3c-404d-b13d-5595f3a10ddc"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.684904 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "1d3cae78-0c3c-404d-b13d-5595f3a10ddc" (UID: "1d3cae78-0c3c-404d-b13d-5595f3a10ddc"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.701046 4755 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.701076 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.701084 4755 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-certs\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.701093 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bfnfj\" (UniqueName: \"kubernetes.io/projected/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-kube-api-access-bfnfj\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.715882 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-config-data" (OuterVolumeSpecName: "config-data") pod "1d3cae78-0c3c-404d-b13d-5595f3a10ddc" (UID: "1d3cae78-0c3c-404d-b13d-5595f3a10ddc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.732904 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1d3cae78-0c3c-404d-b13d-5595f3a10ddc" (UID: "1d3cae78-0c3c-404d-b13d-5595f3a10ddc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.831402 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.831439 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d3cae78-0c3c-404d-b13d-5595f3a10ddc-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:43 crc kubenswrapper[4755]: I0202 22:57:43.953000 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-dc7c944bf-f6zml"] Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.044006 4755 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-api-0" podUID="be9e900a-a2d8-4291-9817-fccd96e11436" containerName="cloudkitty-api" probeResult="failure" output="Get \"https://10.217.0.195:8889/healthcheck\": read tcp 10.217.0.2:57256->10.217.0.195:8889: read: connection reset by peer" Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.049622 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e09fc49d-8b5d-4775-a549-b5ca23d3d13e","Type":"ContainerStarted","Data":"a957c4d8ccaa1e59943713d35731cdbcdee8c350e5b5ea585454df575572adc9"} Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.101454 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a4a28287-a8ee-439b-a1af-927b8819a6ae","Type":"ContainerStarted","Data":"45df599ad9e3cf32e1e6d1ce9c92ebc437a64aa46f5bcf98e86ba902a77bcdf1"} Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.117598 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"1d3cae78-0c3c-404d-b13d-5595f3a10ddc","Type":"ContainerDied","Data":"7b9f403f3ffe2917fe40995f0b922ed9943ad4baded6970729d8ede8450a64e3"} Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.117680 4755 scope.go:117] "RemoveContainer" containerID="d3f30a550f0e5b52af18a2bd4ad31fb954e7af66400659e87b406a0171614f4d" Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.117877 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-proc-0" Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.126823 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" event={"ID":"08987794-2101-4138-8efe-915de10da5be","Type":"ContainerStarted","Data":"288cbc3fbed4422f71c08c547314b913648ce947b10ec9b23aee491ac742fdfa"} Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.302865 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.336760 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-proc-0"] Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.348008 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-proc-0"] Feb 02 22:57:44 crc kubenswrapper[4755]: E0202 22:57:44.348507 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d3cae78-0c3c-404d-b13d-5595f3a10ddc" containerName="cloudkitty-proc" Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.348520 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d3cae78-0c3c-404d-b13d-5595f3a10ddc" containerName="cloudkitty-proc" Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.348823 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d3cae78-0c3c-404d-b13d-5595f3a10ddc" containerName="cloudkitty-proc" Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.349623 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.352548 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-proc-config-data" Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.361757 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.462660 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rfqj4" podUID="6d72a2c1-c4d9-4f54-8ca1-aa44762ae943" containerName="registry-server" probeResult="failure" output=< Feb 02 22:57:44 crc kubenswrapper[4755]: timeout: failed to connect service ":50051" within 1s Feb 02 22:57:44 crc kubenswrapper[4755]: > Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.473764 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5\") " pod="openstack/cloudkitty-proc-0" Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.473942 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5-config-data\") pod \"cloudkitty-proc-0\" (UID: \"48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5\") " pod="openstack/cloudkitty-proc-0" Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.474045 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5-scripts\") pod \"cloudkitty-proc-0\" (UID: \"48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5\") " pod="openstack/cloudkitty-proc-0" Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.474128 4755 
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.474214 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.474416 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5-certs\") pod \"cloudkitty-proc-0\" (UID: \"48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.559902 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0"
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.576406 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5-certs\") pod \"cloudkitty-proc-0\" (UID: \"48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.576616 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.576789 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5-config-data\") pod \"cloudkitty-proc-0\" (UID: \"48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.576941 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5-scripts\") pod \"cloudkitty-proc-0\" (UID: \"48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.577058 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlk9l\" (UniqueName: \"kubernetes.io/projected/48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5-kube-api-access-zlk9l\") pod \"cloudkitty-proc-0\" (UID: \"48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.577147 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.588143 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.590170 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5-certs\") pod \"cloudkitty-proc-0\" (UID: \"48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.590541 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5-scripts\") pod \"cloudkitty-proc-0\" (UID: \"48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.598373 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.604797 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlk9l\" (UniqueName: \"kubernetes.io/projected/48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5-kube-api-access-zlk9l\") pod \"cloudkitty-proc-0\" (UID: \"48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.671805 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5-config-data\") pod \"cloudkitty-proc-0\" (UID: \"48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5\") " pod="openstack/cloudkitty-proc-0"
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.678970 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-combined-ca-bundle\") pod \"be9e900a-a2d8-4291-9817-fccd96e11436\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") "
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.679017 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-scripts\") pod \"be9e900a-a2d8-4291-9817-fccd96e11436\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") "
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.679124 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/be9e900a-a2d8-4291-9817-fccd96e11436-certs\") pod \"be9e900a-a2d8-4291-9817-fccd96e11436\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") "
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.679148 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be9e900a-a2d8-4291-9817-fccd96e11436-logs\") pod \"be9e900a-a2d8-4291-9817-fccd96e11436\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") "
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.679207 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nz96t\" (UniqueName: \"kubernetes.io/projected/be9e900a-a2d8-4291-9817-fccd96e11436-kube-api-access-nz96t\") pod \"be9e900a-a2d8-4291-9817-fccd96e11436\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") "
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.679286 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-internal-tls-certs\") pod \"be9e900a-a2d8-4291-9817-fccd96e11436\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") "
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.679318 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-config-data-custom\") pod \"be9e900a-a2d8-4291-9817-fccd96e11436\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") "
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.679381 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-public-tls-certs\") pod \"be9e900a-a2d8-4291-9817-fccd96e11436\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") "
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.679450 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-config-data\") pod \"be9e900a-a2d8-4291-9817-fccd96e11436\" (UID: \"be9e900a-a2d8-4291-9817-fccd96e11436\") "
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.679561 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be9e900a-a2d8-4291-9817-fccd96e11436-logs" (OuterVolumeSpecName: "logs") pod "be9e900a-a2d8-4291-9817-fccd96e11436" (UID: "be9e900a-a2d8-4291-9817-fccd96e11436"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.679949 4755 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be9e900a-a2d8-4291-9817-fccd96e11436-logs\") on node \"crc\" DevicePath \"\""
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.680429 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0"
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.687315 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be9e900a-a2d8-4291-9817-fccd96e11436-certs" (OuterVolumeSpecName: "certs") pod "be9e900a-a2d8-4291-9817-fccd96e11436" (UID: "be9e900a-a2d8-4291-9817-fccd96e11436"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.687911 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be9e900a-a2d8-4291-9817-fccd96e11436-kube-api-access-nz96t" (OuterVolumeSpecName: "kube-api-access-nz96t") pod "be9e900a-a2d8-4291-9817-fccd96e11436" (UID: "be9e900a-a2d8-4291-9817-fccd96e11436"). InnerVolumeSpecName "kube-api-access-nz96t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.689971 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "be9e900a-a2d8-4291-9817-fccd96e11436" (UID: "be9e900a-a2d8-4291-9817-fccd96e11436"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.690487 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-scripts" (OuterVolumeSpecName: "scripts") pod "be9e900a-a2d8-4291-9817-fccd96e11436" (UID: "be9e900a-a2d8-4291-9817-fccd96e11436"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.729556 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "be9e900a-a2d8-4291-9817-fccd96e11436" (UID: "be9e900a-a2d8-4291-9817-fccd96e11436"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.736980 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-config-data" (OuterVolumeSpecName: "config-data") pod "be9e900a-a2d8-4291-9817-fccd96e11436" (UID: "be9e900a-a2d8-4291-9817-fccd96e11436"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.775335 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "be9e900a-a2d8-4291-9817-fccd96e11436" (UID: "be9e900a-a2d8-4291-9817-fccd96e11436"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.782590 4755 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-public-tls-certs\") on node \"crc\" DevicePath \"\""
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.782630 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-config-data\") on node \"crc\" DevicePath \"\""
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.782642 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.782653 4755 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-scripts\") on node \"crc\" DevicePath \"\""
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.782664 4755 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/be9e900a-a2d8-4291-9817-fccd96e11436-certs\") on node \"crc\" DevicePath \"\""
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.782677 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nz96t\" (UniqueName: \"kubernetes.io/projected/be9e900a-a2d8-4291-9817-fccd96e11436-kube-api-access-nz96t\") on node \"crc\" DevicePath \"\""
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.782689 4755 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-config-data-custom\") on node \"crc\" DevicePath \"\""
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.809979 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "be9e900a-a2d8-4291-9817-fccd96e11436" (UID: "be9e900a-a2d8-4291-9817-fccd96e11436"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 22:57:44 crc kubenswrapper[4755]: I0202 22:57:44.933654 4755 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/be9e900a-a2d8-4291-9817-fccd96e11436-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.083718 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d3cae78-0c3c-404d-b13d-5595f3a10ddc" path="/var/lib/kubelet/pods/1d3cae78-0c3c-404d-b13d-5595f3a10ddc/volumes"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.141448 4755 generic.go:334] "Generic (PLEG): container finished" podID="08987794-2101-4138-8efe-915de10da5be" containerID="6c1233e6a1a40b69e2fbca5939af32b41351ad71476088a045c58131c4d0500d" exitCode=0
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.142381 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" event={"ID":"08987794-2101-4138-8efe-915de10da5be","Type":"ContainerDied","Data":"6c1233e6a1a40b69e2fbca5939af32b41351ad71476088a045c58131c4d0500d"}
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.147438 4755 generic.go:334] "Generic (PLEG): container finished" podID="be9e900a-a2d8-4291-9817-fccd96e11436" containerID="557002d3e000ebf1df46e9340bc73a32fc977d2dc60819b8c346d0ed0d4b7386" exitCode=0
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.147517 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"be9e900a-a2d8-4291-9817-fccd96e11436","Type":"ContainerDied","Data":"557002d3e000ebf1df46e9340bc73a32fc977d2dc60819b8c346d0ed0d4b7386"}
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.147545 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"be9e900a-a2d8-4291-9817-fccd96e11436","Type":"ContainerDied","Data":"552a5a0aba00da5821318b857a434cd381350faea5ddba038ef29ff4173466f1"}
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.147528 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.147573 4755 scope.go:117] "RemoveContainer" containerID="557002d3e000ebf1df46e9340bc73a32fc977d2dc60819b8c346d0ed0d4b7386"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.152676 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e09fc49d-8b5d-4775-a549-b5ca23d3d13e","Type":"ContainerStarted","Data":"5dca39ef4efee90e9812ad358310c9fbb5f1369cd7a3168be1065845bb580261"}
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.266876 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"]
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.383258 4755 scope.go:117] "RemoveContainer" containerID="3df824c680bb5f51ba1203758fdd4f8a065590dd78171682d79b9c0f10711741"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.401903 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"]
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.414976 4755 scope.go:117] "RemoveContainer" containerID="557002d3e000ebf1df46e9340bc73a32fc977d2dc60819b8c346d0ed0d4b7386"
Feb 02 22:57:45 crc kubenswrapper[4755]: E0202 22:57:45.417245 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"557002d3e000ebf1df46e9340bc73a32fc977d2dc60819b8c346d0ed0d4b7386\": container with ID starting with 557002d3e000ebf1df46e9340bc73a32fc977d2dc60819b8c346d0ed0d4b7386 not found: ID does not exist" containerID="557002d3e000ebf1df46e9340bc73a32fc977d2dc60819b8c346d0ed0d4b7386"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.417328 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"557002d3e000ebf1df46e9340bc73a32fc977d2dc60819b8c346d0ed0d4b7386"} err="failed to get container status \"557002d3e000ebf1df46e9340bc73a32fc977d2dc60819b8c346d0ed0d4b7386\": rpc error: code = NotFound desc = could not find container \"557002d3e000ebf1df46e9340bc73a32fc977d2dc60819b8c346d0ed0d4b7386\": container with ID starting with 557002d3e000ebf1df46e9340bc73a32fc977d2dc60819b8c346d0ed0d4b7386 not found: ID does not exist"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.417375 4755 scope.go:117] "RemoveContainer" containerID="3df824c680bb5f51ba1203758fdd4f8a065590dd78171682d79b9c0f10711741"
Feb 02 22:57:45 crc kubenswrapper[4755]: E0202 22:57:45.419711 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3df824c680bb5f51ba1203758fdd4f8a065590dd78171682d79b9c0f10711741\": container with ID starting with 3df824c680bb5f51ba1203758fdd4f8a065590dd78171682d79b9c0f10711741 not found: ID does not exist" containerID="3df824c680bb5f51ba1203758fdd4f8a065590dd78171682d79b9c0f10711741"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.419825 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3df824c680bb5f51ba1203758fdd4f8a065590dd78171682d79b9c0f10711741"} err="failed to get container status \"3df824c680bb5f51ba1203758fdd4f8a065590dd78171682d79b9c0f10711741\": rpc error: code = NotFound desc = could not find container \"3df824c680bb5f51ba1203758fdd4f8a065590dd78171682d79b9c0f10711741\": container with ID starting with 3df824c680bb5f51ba1203758fdd4f8a065590dd78171682d79b9c0f10711741 not found: ID does not exist"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.423982 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-api-0"]
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.437827 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-api-0"]
Feb 02 22:57:45 crc kubenswrapper[4755]: E0202 22:57:45.438289 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be9e900a-a2d8-4291-9817-fccd96e11436" containerName="cloudkitty-api"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.438305 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="be9e900a-a2d8-4291-9817-fccd96e11436" containerName="cloudkitty-api"
Feb 02 22:57:45 crc kubenswrapper[4755]: E0202 22:57:45.438320 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be9e900a-a2d8-4291-9817-fccd96e11436" containerName="cloudkitty-api-log"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.438327 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="be9e900a-a2d8-4291-9817-fccd96e11436" containerName="cloudkitty-api-log"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.438559 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="be9e900a-a2d8-4291-9817-fccd96e11436" containerName="cloudkitty-api-log"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.438592 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="be9e900a-a2d8-4291-9817-fccd96e11436" containerName="cloudkitty-api"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.439692 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.448065 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"]
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.461218 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-api-config-data"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.461618 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-public-svc"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.461756 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-internal-svc"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.556598 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f8c6396a-eb16-427b-8b30-07e7ad4d0415-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.556650 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8c6396a-eb16-427b-8b30-07e7ad4d0415-logs\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.560311 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8c6396a-eb16-427b-8b30-07e7ad4d0415-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.560342 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8c6396a-eb16-427b-8b30-07e7ad4d0415-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.560404 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hqm5\" (UniqueName: \"kubernetes.io/projected/f8c6396a-eb16-427b-8b30-07e7ad4d0415-kube-api-access-9hqm5\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.560471 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/f8c6396a-eb16-427b-8b30-07e7ad4d0415-certs\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.560500 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8c6396a-eb16-427b-8b30-07e7ad4d0415-config-data\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.560530 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8c6396a-eb16-427b-8b30-07e7ad4d0415-scripts\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.560643 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8c6396a-eb16-427b-8b30-07e7ad4d0415-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.663440 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8c6396a-eb16-427b-8b30-07e7ad4d0415-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.663496 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8c6396a-eb16-427b-8b30-07e7ad4d0415-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.663572 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hqm5\" (UniqueName: \"kubernetes.io/projected/f8c6396a-eb16-427b-8b30-07e7ad4d0415-kube-api-access-9hqm5\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.664099 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/f8c6396a-eb16-427b-8b30-07e7ad4d0415-certs\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.664139 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8c6396a-eb16-427b-8b30-07e7ad4d0415-config-data\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.664177 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8c6396a-eb16-427b-8b30-07e7ad4d0415-scripts\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.664288 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8c6396a-eb16-427b-8b30-07e7ad4d0415-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.664372 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f8c6396a-eb16-427b-8b30-07e7ad4d0415-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.664397 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8c6396a-eb16-427b-8b30-07e7ad4d0415-logs\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.665011 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8c6396a-eb16-427b-8b30-07e7ad4d0415-logs\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.668353 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f8c6396a-eb16-427b-8b30-07e7ad4d0415-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.668946 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/f8c6396a-eb16-427b-8b30-07e7ad4d0415-certs\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.669065 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8c6396a-eb16-427b-8b30-07e7ad4d0415-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0"
Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.669468 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0" Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.668296 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8c6396a-eb16-427b-8b30-07e7ad4d0415-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0" Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.679787 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8c6396a-eb16-427b-8b30-07e7ad4d0415-config-data\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0" Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.680221 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8c6396a-eb16-427b-8b30-07e7ad4d0415-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0" Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.687647 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hqm5\" (UniqueName: \"kubernetes.io/projected/f8c6396a-eb16-427b-8b30-07e7ad4d0415-kube-api-access-9hqm5\") pod \"cloudkitty-api-0\" (UID: \"f8c6396a-eb16-427b-8b30-07e7ad4d0415\") " pod="openstack/cloudkitty-api-0" Feb 02 22:57:45 crc kubenswrapper[4755]: I0202 22:57:45.772000 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Feb 02 22:57:46 crc kubenswrapper[4755]: I0202 22:57:46.166449 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5","Type":"ContainerStarted","Data":"5dd7b06ee8ed428a73b248172f04d2eda06d5af7e0cd0c8e38cc07e8282094b8"} Feb 02 22:57:46 crc kubenswrapper[4755]: I0202 22:57:46.166830 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5","Type":"ContainerStarted","Data":"a0f42d8f0a72b73780e7f9a84f43c7bb25d474f0cf6b08371e4acf6054e33057"} Feb 02 22:57:46 crc kubenswrapper[4755]: I0202 22:57:46.169405 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" event={"ID":"08987794-2101-4138-8efe-915de10da5be","Type":"ContainerStarted","Data":"3fc18d501e2d47e033382102a5b3a7e985c17be2b7bc804f2ddbb27a543708ee"} Feb 02 22:57:46 crc kubenswrapper[4755]: I0202 22:57:46.169946 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:46 crc kubenswrapper[4755]: I0202 22:57:46.196113 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-proc-0" podStartSLOduration=2.002825863 podStartE2EDuration="2.196094462s" podCreationTimestamp="2026-02-02 22:57:44 +0000 UTC" firstStartedPulling="2026-02-02 22:57:45.276672155 +0000 UTC m=+1420.967892531" lastFinishedPulling="2026-02-02 22:57:45.469940804 +0000 UTC m=+1421.161161130" observedRunningTime="2026-02-02 22:57:46.184355342 +0000 UTC m=+1421.875575668" watchObservedRunningTime="2026-02-02 22:57:46.196094462 +0000 UTC m=+1421.887314788" Feb 02 22:57:46 crc kubenswrapper[4755]: I0202 22:57:46.208570 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" podStartSLOduration=4.208553952 podStartE2EDuration="4.208553952s" podCreationTimestamp="2026-02-02 22:57:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:57:46.203633094 +0000 UTC m=+1421.894853430" watchObservedRunningTime="2026-02-02 22:57:46.208553952 +0000 UTC m=+1421.899774268" Feb 02 22:57:46 crc kubenswrapper[4755]: I0202 22:57:46.356015 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Feb 02 22:57:47 crc kubenswrapper[4755]: I0202 22:57:47.082018 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be9e900a-a2d8-4291-9817-fccd96e11436" path="/var/lib/kubelet/pods/be9e900a-a2d8-4291-9817-fccd96e11436/volumes" Feb 02 22:57:47 crc kubenswrapper[4755]: I0202 22:57:47.182346 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"f8c6396a-eb16-427b-8b30-07e7ad4d0415","Type":"ContainerStarted","Data":"eb191e196911b42c94f7a904bc34371d714adb7faac487a038ad00c8ad7fb4d8"} Feb 02 22:57:47 crc kubenswrapper[4755]: I0202 22:57:47.182386 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"f8c6396a-eb16-427b-8b30-07e7ad4d0415","Type":"ContainerStarted","Data":"fb31ecb5b4abb53e34011b7ffe93833c9ffdb801eb04d9bc48b7f50084a578a7"} Feb 02 22:57:47 crc kubenswrapper[4755]: I0202 22:57:47.182395 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"f8c6396a-eb16-427b-8b30-07e7ad4d0415","Type":"ContainerStarted","Data":"606532f0f394b9b6b407f6d610635b301f3de709188a74f6348ae08457095d8d"} Feb 02 22:57:47 crc kubenswrapper[4755]: I0202 22:57:47.182826 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-api-0" Feb 02 22:57:47 crc kubenswrapper[4755]: I0202 22:57:47.209659 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-api-0" podStartSLOduration=2.209638663 podStartE2EDuration="2.209638663s" podCreationTimestamp="2026-02-02 22:57:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:57:47.206378911 +0000 UTC m=+1422.897599237" watchObservedRunningTime="2026-02-02 22:57:47.209638663 +0000 UTC m=+1422.900858979" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.242843 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.368937 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-54dd998c-smhc4"] Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.369365 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-54dd998c-smhc4" podUID="a88f43d2-9cf6-43a0-b2bd-f945f279eea2" containerName="dnsmasq-dns" containerID="cri-o://bc1769ffb1fa778065eeeb65d0ed333f7523f110c8bb1c9f6883892f2461425d" gracePeriod=10 Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.460520 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rfqj4" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.522611 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rfqj4" Feb 02 
22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.580639 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-c4b758ff5-78spt"] Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.582471 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.594340 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c4b758ff5-78spt"] Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.654571 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1b8a733b-079b-4c38-90c3-6136137f4d40-dns-svc\") pod \"dnsmasq-dns-c4b758ff5-78spt\" (UID: \"1b8a733b-079b-4c38-90c3-6136137f4d40\") " pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.654858 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b8a733b-079b-4c38-90c3-6136137f4d40-config\") pod \"dnsmasq-dns-c4b758ff5-78spt\" (UID: \"1b8a733b-079b-4c38-90c3-6136137f4d40\") " pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.654888 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1b8a733b-079b-4c38-90c3-6136137f4d40-ovsdbserver-nb\") pod \"dnsmasq-dns-c4b758ff5-78spt\" (UID: \"1b8a733b-079b-4c38-90c3-6136137f4d40\") " pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.654941 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/1b8a733b-079b-4c38-90c3-6136137f4d40-openstack-edpm-ipam\") pod \"dnsmasq-dns-c4b758ff5-78spt\" (UID: \"1b8a733b-079b-4c38-90c3-6136137f4d40\") " pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.655083 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1b8a733b-079b-4c38-90c3-6136137f4d40-dns-swift-storage-0\") pod \"dnsmasq-dns-c4b758ff5-78spt\" (UID: \"1b8a733b-079b-4c38-90c3-6136137f4d40\") " pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.655352 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xd9q8\" (UniqueName: \"kubernetes.io/projected/1b8a733b-079b-4c38-90c3-6136137f4d40-kube-api-access-xd9q8\") pod \"dnsmasq-dns-c4b758ff5-78spt\" (UID: \"1b8a733b-079b-4c38-90c3-6136137f4d40\") " pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.655404 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1b8a733b-079b-4c38-90c3-6136137f4d40-ovsdbserver-sb\") pod \"dnsmasq-dns-c4b758ff5-78spt\" (UID: \"1b8a733b-079b-4c38-90c3-6136137f4d40\") " pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.695501 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rfqj4"] Feb 02 22:57:53 crc 
kubenswrapper[4755]: I0202 22:57:53.757678 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1b8a733b-079b-4c38-90c3-6136137f4d40-dns-svc\") pod \"dnsmasq-dns-c4b758ff5-78spt\" (UID: \"1b8a733b-079b-4c38-90c3-6136137f4d40\") " pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.757761 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b8a733b-079b-4c38-90c3-6136137f4d40-config\") pod \"dnsmasq-dns-c4b758ff5-78spt\" (UID: \"1b8a733b-079b-4c38-90c3-6136137f4d40\") " pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.757823 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1b8a733b-079b-4c38-90c3-6136137f4d40-ovsdbserver-nb\") pod \"dnsmasq-dns-c4b758ff5-78spt\" (UID: \"1b8a733b-079b-4c38-90c3-6136137f4d40\") " pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.758182 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/1b8a733b-079b-4c38-90c3-6136137f4d40-openstack-edpm-ipam\") pod \"dnsmasq-dns-c4b758ff5-78spt\" (UID: \"1b8a733b-079b-4c38-90c3-6136137f4d40\") " pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.758657 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1b8a733b-079b-4c38-90c3-6136137f4d40-dns-svc\") pod \"dnsmasq-dns-c4b758ff5-78spt\" (UID: \"1b8a733b-079b-4c38-90c3-6136137f4d40\") " pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.758830 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b8a733b-079b-4c38-90c3-6136137f4d40-config\") pod \"dnsmasq-dns-c4b758ff5-78spt\" (UID: \"1b8a733b-079b-4c38-90c3-6136137f4d40\") " pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.758993 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1b8a733b-079b-4c38-90c3-6136137f4d40-ovsdbserver-nb\") pod \"dnsmasq-dns-c4b758ff5-78spt\" (UID: \"1b8a733b-079b-4c38-90c3-6136137f4d40\") " pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.759059 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/1b8a733b-079b-4c38-90c3-6136137f4d40-openstack-edpm-ipam\") pod \"dnsmasq-dns-c4b758ff5-78spt\" (UID: \"1b8a733b-079b-4c38-90c3-6136137f4d40\") " pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.759186 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1b8a733b-079b-4c38-90c3-6136137f4d40-dns-swift-storage-0\") pod \"dnsmasq-dns-c4b758ff5-78spt\" (UID: \"1b8a733b-079b-4c38-90c3-6136137f4d40\") " pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.759292 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-xd9q8\" (UniqueName: \"kubernetes.io/projected/1b8a733b-079b-4c38-90c3-6136137f4d40-kube-api-access-xd9q8\") pod \"dnsmasq-dns-c4b758ff5-78spt\" (UID: \"1b8a733b-079b-4c38-90c3-6136137f4d40\") " pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.759329 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1b8a733b-079b-4c38-90c3-6136137f4d40-ovsdbserver-sb\") pod \"dnsmasq-dns-c4b758ff5-78spt\" (UID: \"1b8a733b-079b-4c38-90c3-6136137f4d40\") " pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.760109 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1b8a733b-079b-4c38-90c3-6136137f4d40-ovsdbserver-sb\") pod \"dnsmasq-dns-c4b758ff5-78spt\" (UID: \"1b8a733b-079b-4c38-90c3-6136137f4d40\") " pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.760331 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1b8a733b-079b-4c38-90c3-6136137f4d40-dns-swift-storage-0\") pod \"dnsmasq-dns-c4b758ff5-78spt\" (UID: \"1b8a733b-079b-4c38-90c3-6136137f4d40\") " pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.789326 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xd9q8\" (UniqueName: \"kubernetes.io/projected/1b8a733b-079b-4c38-90c3-6136137f4d40-kube-api-access-xd9q8\") pod \"dnsmasq-dns-c4b758ff5-78spt\" (UID: \"1b8a733b-079b-4c38-90c3-6136137f4d40\") " pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:53 crc kubenswrapper[4755]: I0202 22:57:53.913354 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.041615 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.167881 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-ovsdbserver-nb\") pod \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.168042 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-dns-swift-storage-0\") pod \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.168166 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-ovsdbserver-sb\") pod \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.168196 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-config\") pod \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.168220 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bb4jx\" (UniqueName: \"kubernetes.io/projected/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-kube-api-access-bb4jx\") pod \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.168244 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-dns-svc\") pod \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\" (UID: \"a88f43d2-9cf6-43a0-b2bd-f945f279eea2\") " Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.179333 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-kube-api-access-bb4jx" (OuterVolumeSpecName: "kube-api-access-bb4jx") pod "a88f43d2-9cf6-43a0-b2bd-f945f279eea2" (UID: "a88f43d2-9cf6-43a0-b2bd-f945f279eea2"). InnerVolumeSpecName "kube-api-access-bb4jx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.223529 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-config" (OuterVolumeSpecName: "config") pod "a88f43d2-9cf6-43a0-b2bd-f945f279eea2" (UID: "a88f43d2-9cf6-43a0-b2bd-f945f279eea2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.226691 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a88f43d2-9cf6-43a0-b2bd-f945f279eea2" (UID: "a88f43d2-9cf6-43a0-b2bd-f945f279eea2"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.239791 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a88f43d2-9cf6-43a0-b2bd-f945f279eea2" (UID: "a88f43d2-9cf6-43a0-b2bd-f945f279eea2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.239795 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a88f43d2-9cf6-43a0-b2bd-f945f279eea2" (UID: "a88f43d2-9cf6-43a0-b2bd-f945f279eea2"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.241217 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a88f43d2-9cf6-43a0-b2bd-f945f279eea2" (UID: "a88f43d2-9cf6-43a0-b2bd-f945f279eea2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.272409 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.272448 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.272461 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bb4jx\" (UniqueName: \"kubernetes.io/projected/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-kube-api-access-bb4jx\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.272475 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.272486 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.272495 4755 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a88f43d2-9cf6-43a0-b2bd-f945f279eea2-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.296746 4755 generic.go:334] "Generic (PLEG): container finished" podID="a88f43d2-9cf6-43a0-b2bd-f945f279eea2" containerID="bc1769ffb1fa778065eeeb65d0ed333f7523f110c8bb1c9f6883892f2461425d" exitCode=0 Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.296770 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54dd998c-smhc4" event={"ID":"a88f43d2-9cf6-43a0-b2bd-f945f279eea2","Type":"ContainerDied","Data":"bc1769ffb1fa778065eeeb65d0ed333f7523f110c8bb1c9f6883892f2461425d"} Feb 02 
22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.296805 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-54dd998c-smhc4" Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.296819 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54dd998c-smhc4" event={"ID":"a88f43d2-9cf6-43a0-b2bd-f945f279eea2","Type":"ContainerDied","Data":"889dac17318b027c4c3bedd8aae2eea61c50ead1276d2c7a3c1ed336520cd3ac"} Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.296838 4755 scope.go:117] "RemoveContainer" containerID="bc1769ffb1fa778065eeeb65d0ed333f7523f110c8bb1c9f6883892f2461425d" Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.335647 4755 scope.go:117] "RemoveContainer" containerID="d9f916b5236f92a13e20fb68cb4274cb4be5f7ad83a015f555cb3d24c7feaa24" Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.348296 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-54dd998c-smhc4"] Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.361476 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-54dd998c-smhc4"] Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.367204 4755 scope.go:117] "RemoveContainer" containerID="bc1769ffb1fa778065eeeb65d0ed333f7523f110c8bb1c9f6883892f2461425d" Feb 02 22:57:54 crc kubenswrapper[4755]: E0202 22:57:54.367881 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc1769ffb1fa778065eeeb65d0ed333f7523f110c8bb1c9f6883892f2461425d\": container with ID starting with bc1769ffb1fa778065eeeb65d0ed333f7523f110c8bb1c9f6883892f2461425d not found: ID does not exist" containerID="bc1769ffb1fa778065eeeb65d0ed333f7523f110c8bb1c9f6883892f2461425d" Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.367920 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc1769ffb1fa778065eeeb65d0ed333f7523f110c8bb1c9f6883892f2461425d"} err="failed to get container status \"bc1769ffb1fa778065eeeb65d0ed333f7523f110c8bb1c9f6883892f2461425d\": rpc error: code = NotFound desc = could not find container \"bc1769ffb1fa778065eeeb65d0ed333f7523f110c8bb1c9f6883892f2461425d\": container with ID starting with bc1769ffb1fa778065eeeb65d0ed333f7523f110c8bb1c9f6883892f2461425d not found: ID does not exist" Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.367940 4755 scope.go:117] "RemoveContainer" containerID="d9f916b5236f92a13e20fb68cb4274cb4be5f7ad83a015f555cb3d24c7feaa24" Feb 02 22:57:54 crc kubenswrapper[4755]: E0202 22:57:54.368303 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9f916b5236f92a13e20fb68cb4274cb4be5f7ad83a015f555cb3d24c7feaa24\": container with ID starting with d9f916b5236f92a13e20fb68cb4274cb4be5f7ad83a015f555cb3d24c7feaa24 not found: ID does not exist" containerID="d9f916b5236f92a13e20fb68cb4274cb4be5f7ad83a015f555cb3d24c7feaa24" Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.368328 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9f916b5236f92a13e20fb68cb4274cb4be5f7ad83a015f555cb3d24c7feaa24"} err="failed to get container status \"d9f916b5236f92a13e20fb68cb4274cb4be5f7ad83a015f555cb3d24c7feaa24\": rpc error: code = NotFound desc = could not find container \"d9f916b5236f92a13e20fb68cb4274cb4be5f7ad83a015f555cb3d24c7feaa24\": container with ID starting with 
d9f916b5236f92a13e20fb68cb4274cb4be5f7ad83a015f555cb3d24c7feaa24 not found: ID does not exist" Feb 02 22:57:54 crc kubenswrapper[4755]: I0202 22:57:54.379057 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c4b758ff5-78spt"] Feb 02 22:57:55 crc kubenswrapper[4755]: I0202 22:57:55.090082 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a88f43d2-9cf6-43a0-b2bd-f945f279eea2" path="/var/lib/kubelet/pods/a88f43d2-9cf6-43a0-b2bd-f945f279eea2/volumes" Feb 02 22:57:55 crc kubenswrapper[4755]: I0202 22:57:55.311167 4755 generic.go:334] "Generic (PLEG): container finished" podID="1b8a733b-079b-4c38-90c3-6136137f4d40" containerID="b7972608ad0082ef1eee7fe0cb63ee34b02da9268a39d8fc03a6e732270d9a5f" exitCode=0 Feb 02 22:57:55 crc kubenswrapper[4755]: I0202 22:57:55.311267 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c4b758ff5-78spt" event={"ID":"1b8a733b-079b-4c38-90c3-6136137f4d40","Type":"ContainerDied","Data":"b7972608ad0082ef1eee7fe0cb63ee34b02da9268a39d8fc03a6e732270d9a5f"} Feb 02 22:57:55 crc kubenswrapper[4755]: I0202 22:57:55.311359 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c4b758ff5-78spt" event={"ID":"1b8a733b-079b-4c38-90c3-6136137f4d40","Type":"ContainerStarted","Data":"ec7f7c542313eae3e3abc1b5c9e2e49a9046abb59f8dc6d154d6ea4926c04c88"} Feb 02 22:57:55 crc kubenswrapper[4755]: I0202 22:57:55.313513 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rfqj4" podUID="6d72a2c1-c4d9-4f54-8ca1-aa44762ae943" containerName="registry-server" containerID="cri-o://2bb8b68404d5c3466ac8c171cf474f3e141b696cd6f53d4190c9ef841f5b8cbb" gracePeriod=2 Feb 02 22:57:55 crc kubenswrapper[4755]: I0202 22:57:55.884417 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rfqj4" Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.014134 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d72a2c1-c4d9-4f54-8ca1-aa44762ae943-catalog-content\") pod \"6d72a2c1-c4d9-4f54-8ca1-aa44762ae943\" (UID: \"6d72a2c1-c4d9-4f54-8ca1-aa44762ae943\") " Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.014199 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fntp4\" (UniqueName: \"kubernetes.io/projected/6d72a2c1-c4d9-4f54-8ca1-aa44762ae943-kube-api-access-fntp4\") pod \"6d72a2c1-c4d9-4f54-8ca1-aa44762ae943\" (UID: \"6d72a2c1-c4d9-4f54-8ca1-aa44762ae943\") " Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.014351 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d72a2c1-c4d9-4f54-8ca1-aa44762ae943-utilities\") pod \"6d72a2c1-c4d9-4f54-8ca1-aa44762ae943\" (UID: \"6d72a2c1-c4d9-4f54-8ca1-aa44762ae943\") " Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.015674 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d72a2c1-c4d9-4f54-8ca1-aa44762ae943-utilities" (OuterVolumeSpecName: "utilities") pod "6d72a2c1-c4d9-4f54-8ca1-aa44762ae943" (UID: "6d72a2c1-c4d9-4f54-8ca1-aa44762ae943"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.019187 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d72a2c1-c4d9-4f54-8ca1-aa44762ae943-kube-api-access-fntp4" (OuterVolumeSpecName: "kube-api-access-fntp4") pod "6d72a2c1-c4d9-4f54-8ca1-aa44762ae943" (UID: "6d72a2c1-c4d9-4f54-8ca1-aa44762ae943"). InnerVolumeSpecName "kube-api-access-fntp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.117151 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d72a2c1-c4d9-4f54-8ca1-aa44762ae943-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.117196 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fntp4\" (UniqueName: \"kubernetes.io/projected/6d72a2c1-c4d9-4f54-8ca1-aa44762ae943-kube-api-access-fntp4\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.173035 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d72a2c1-c4d9-4f54-8ca1-aa44762ae943-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6d72a2c1-c4d9-4f54-8ca1-aa44762ae943" (UID: "6d72a2c1-c4d9-4f54-8ca1-aa44762ae943"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.219138 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d72a2c1-c4d9-4f54-8ca1-aa44762ae943-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.326373 4755 generic.go:334] "Generic (PLEG): container finished" podID="6d72a2c1-c4d9-4f54-8ca1-aa44762ae943" containerID="2bb8b68404d5c3466ac8c171cf474f3e141b696cd6f53d4190c9ef841f5b8cbb" exitCode=0 Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.326433 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rfqj4" event={"ID":"6d72a2c1-c4d9-4f54-8ca1-aa44762ae943","Type":"ContainerDied","Data":"2bb8b68404d5c3466ac8c171cf474f3e141b696cd6f53d4190c9ef841f5b8cbb"} Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.326458 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rfqj4" event={"ID":"6d72a2c1-c4d9-4f54-8ca1-aa44762ae943","Type":"ContainerDied","Data":"b365daf56b027d260d8c13f994e6284c54135f001a9c2ca6fcf60da882c8e58a"} Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.326474 4755 scope.go:117] "RemoveContainer" containerID="2bb8b68404d5c3466ac8c171cf474f3e141b696cd6f53d4190c9ef841f5b8cbb" Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.326488 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rfqj4" Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.329464 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c4b758ff5-78spt" event={"ID":"1b8a733b-079b-4c38-90c3-6136137f4d40","Type":"ContainerStarted","Data":"49b652dba3a5ca73529e69e732713a5db0ed9f515acb714dd95ef19b6aff9ffe"} Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.329718 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.358988 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-c4b758ff5-78spt" podStartSLOduration=3.358963831 podStartE2EDuration="3.358963831s" podCreationTimestamp="2026-02-02 22:57:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:57:56.351598594 +0000 UTC m=+1432.042818990" watchObservedRunningTime="2026-02-02 22:57:56.358963831 +0000 UTC m=+1432.050184197" Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.380672 4755 scope.go:117] "RemoveContainer" containerID="e35c3b7030be3217cbbed55d1a767c2e67e274f40a17e5f96ddbf68bbf4cce20" Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.396124 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rfqj4"] Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.408526 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rfqj4"] Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.417357 4755 scope.go:117] "RemoveContainer" containerID="f6776e005cbf09f4e35fa1fbf20523eaab8931899a9b5c8b893fd62bc3ab503a" Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.470895 4755 scope.go:117] "RemoveContainer" containerID="2bb8b68404d5c3466ac8c171cf474f3e141b696cd6f53d4190c9ef841f5b8cbb" Feb 02 22:57:56 crc kubenswrapper[4755]: E0202 22:57:56.471605 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2bb8b68404d5c3466ac8c171cf474f3e141b696cd6f53d4190c9ef841f5b8cbb\": container with ID starting with 2bb8b68404d5c3466ac8c171cf474f3e141b696cd6f53d4190c9ef841f5b8cbb not found: ID does not exist" containerID="2bb8b68404d5c3466ac8c171cf474f3e141b696cd6f53d4190c9ef841f5b8cbb" Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.471653 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bb8b68404d5c3466ac8c171cf474f3e141b696cd6f53d4190c9ef841f5b8cbb"} err="failed to get container status \"2bb8b68404d5c3466ac8c171cf474f3e141b696cd6f53d4190c9ef841f5b8cbb\": rpc error: code = NotFound desc = could not find container \"2bb8b68404d5c3466ac8c171cf474f3e141b696cd6f53d4190c9ef841f5b8cbb\": container with ID starting with 2bb8b68404d5c3466ac8c171cf474f3e141b696cd6f53d4190c9ef841f5b8cbb not found: ID does not exist" Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.471679 4755 scope.go:117] "RemoveContainer" containerID="e35c3b7030be3217cbbed55d1a767c2e67e274f40a17e5f96ddbf68bbf4cce20" Feb 02 22:57:56 crc kubenswrapper[4755]: E0202 22:57:56.472232 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e35c3b7030be3217cbbed55d1a767c2e67e274f40a17e5f96ddbf68bbf4cce20\": container with ID starting with 
e35c3b7030be3217cbbed55d1a767c2e67e274f40a17e5f96ddbf68bbf4cce20 not found: ID does not exist" containerID="e35c3b7030be3217cbbed55d1a767c2e67e274f40a17e5f96ddbf68bbf4cce20" Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.472325 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e35c3b7030be3217cbbed55d1a767c2e67e274f40a17e5f96ddbf68bbf4cce20"} err="failed to get container status \"e35c3b7030be3217cbbed55d1a767c2e67e274f40a17e5f96ddbf68bbf4cce20\": rpc error: code = NotFound desc = could not find container \"e35c3b7030be3217cbbed55d1a767c2e67e274f40a17e5f96ddbf68bbf4cce20\": container with ID starting with e35c3b7030be3217cbbed55d1a767c2e67e274f40a17e5f96ddbf68bbf4cce20 not found: ID does not exist" Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.472403 4755 scope.go:117] "RemoveContainer" containerID="f6776e005cbf09f4e35fa1fbf20523eaab8931899a9b5c8b893fd62bc3ab503a" Feb 02 22:57:56 crc kubenswrapper[4755]: E0202 22:57:56.473122 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6776e005cbf09f4e35fa1fbf20523eaab8931899a9b5c8b893fd62bc3ab503a\": container with ID starting with f6776e005cbf09f4e35fa1fbf20523eaab8931899a9b5c8b893fd62bc3ab503a not found: ID does not exist" containerID="f6776e005cbf09f4e35fa1fbf20523eaab8931899a9b5c8b893fd62bc3ab503a" Feb 02 22:57:56 crc kubenswrapper[4755]: I0202 22:57:56.473178 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6776e005cbf09f4e35fa1fbf20523eaab8931899a9b5c8b893fd62bc3ab503a"} err="failed to get container status \"f6776e005cbf09f4e35fa1fbf20523eaab8931899a9b5c8b893fd62bc3ab503a\": rpc error: code = NotFound desc = could not find container \"f6776e005cbf09f4e35fa1fbf20523eaab8931899a9b5c8b893fd62bc3ab503a\": container with ID starting with f6776e005cbf09f4e35fa1fbf20523eaab8931899a9b5c8b893fd62bc3ab503a not found: ID does not exist" Feb 02 22:57:57 crc kubenswrapper[4755]: I0202 22:57:57.088009 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d72a2c1-c4d9-4f54-8ca1-aa44762ae943" path="/var/lib/kubelet/pods/6d72a2c1-c4d9-4f54-8ca1-aa44762ae943/volumes" Feb 02 22:58:01 crc kubenswrapper[4755]: I0202 22:58:01.463759 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 02 22:58:03 crc kubenswrapper[4755]: I0202 22:58:03.915780 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-c4b758ff5-78spt" Feb 02 22:58:03 crc kubenswrapper[4755]: I0202 22:58:03.994560 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-dc7c944bf-f6zml"] Feb 02 22:58:03 crc kubenswrapper[4755]: I0202 22:58:03.998906 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" podUID="08987794-2101-4138-8efe-915de10da5be" containerName="dnsmasq-dns" containerID="cri-o://3fc18d501e2d47e033382102a5b3a7e985c17be2b7bc804f2ddbb27a543708ee" gracePeriod=10 Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.431793 4755 generic.go:334] "Generic (PLEG): container finished" podID="08987794-2101-4138-8efe-915de10da5be" containerID="3fc18d501e2d47e033382102a5b3a7e985c17be2b7bc804f2ddbb27a543708ee" exitCode=0 Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.432005 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" 
event={"ID":"08987794-2101-4138-8efe-915de10da5be","Type":"ContainerDied","Data":"3fc18d501e2d47e033382102a5b3a7e985c17be2b7bc804f2ddbb27a543708ee"} Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.432036 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" event={"ID":"08987794-2101-4138-8efe-915de10da5be","Type":"ContainerDied","Data":"288cbc3fbed4422f71c08c547314b913648ce947b10ec9b23aee491ac742fdfa"} Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.432051 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="288cbc3fbed4422f71c08c547314b913648ce947b10ec9b23aee491ac742fdfa" Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.506637 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.638093 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q74jl\" (UniqueName: \"kubernetes.io/projected/08987794-2101-4138-8efe-915de10da5be-kube-api-access-q74jl\") pod \"08987794-2101-4138-8efe-915de10da5be\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.638388 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-openstack-edpm-ipam\") pod \"08987794-2101-4138-8efe-915de10da5be\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.638420 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-ovsdbserver-sb\") pod \"08987794-2101-4138-8efe-915de10da5be\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.638457 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-ovsdbserver-nb\") pod \"08987794-2101-4138-8efe-915de10da5be\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.638485 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-dns-svc\") pod \"08987794-2101-4138-8efe-915de10da5be\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.638557 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-dns-swift-storage-0\") pod \"08987794-2101-4138-8efe-915de10da5be\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.638594 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-config\") pod \"08987794-2101-4138-8efe-915de10da5be\" (UID: \"08987794-2101-4138-8efe-915de10da5be\") " Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.653652 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/08987794-2101-4138-8efe-915de10da5be-kube-api-access-q74jl" (OuterVolumeSpecName: "kube-api-access-q74jl") pod "08987794-2101-4138-8efe-915de10da5be" (UID: "08987794-2101-4138-8efe-915de10da5be"). InnerVolumeSpecName "kube-api-access-q74jl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.710117 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "08987794-2101-4138-8efe-915de10da5be" (UID: "08987794-2101-4138-8efe-915de10da5be"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.711229 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "08987794-2101-4138-8efe-915de10da5be" (UID: "08987794-2101-4138-8efe-915de10da5be"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.712215 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "08987794-2101-4138-8efe-915de10da5be" (UID: "08987794-2101-4138-8efe-915de10da5be"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.717720 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "08987794-2101-4138-8efe-915de10da5be" (UID: "08987794-2101-4138-8efe-915de10da5be"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.719202 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-config" (OuterVolumeSpecName: "config") pod "08987794-2101-4138-8efe-915de10da5be" (UID: "08987794-2101-4138-8efe-915de10da5be"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.720528 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "08987794-2101-4138-8efe-915de10da5be" (UID: "08987794-2101-4138-8efe-915de10da5be"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.741366 4755 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.741400 4755 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.741410 4755 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-config\") on node \"crc\" DevicePath \"\"" Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.741420 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q74jl\" (UniqueName: \"kubernetes.io/projected/08987794-2101-4138-8efe-915de10da5be-kube-api-access-q74jl\") on node \"crc\" DevicePath \"\"" Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.741433 4755 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.741441 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 02 22:58:04 crc kubenswrapper[4755]: I0202 22:58:04.741448 4755 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/08987794-2101-4138-8efe-915de10da5be-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 02 22:58:05 crc kubenswrapper[4755]: I0202 22:58:05.446676 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-dc7c944bf-f6zml" Feb 02 22:58:05 crc kubenswrapper[4755]: I0202 22:58:05.484586 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-dc7c944bf-f6zml"] Feb 02 22:58:05 crc kubenswrapper[4755]: I0202 22:58:05.496075 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-dc7c944bf-f6zml"] Feb 02 22:58:07 crc kubenswrapper[4755]: I0202 22:58:07.090874 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08987794-2101-4138-8efe-915de10da5be" path="/var/lib/kubelet/pods/08987794-2101-4138-8efe-915de10da5be/volumes" Feb 02 22:58:11 crc kubenswrapper[4755]: E0202 22:58:11.176311 4755 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08987794_2101_4138_8efe_915de10da5be.slice/crio-288cbc3fbed4422f71c08c547314b913648ce947b10ec9b23aee491ac742fdfa\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08987794_2101_4138_8efe_915de10da5be.slice\": RecentStats: unable to find data in memory cache]" Feb 02 22:58:16 crc kubenswrapper[4755]: I0202 22:58:16.632947 4755 generic.go:334] "Generic (PLEG): container finished" podID="a4a28287-a8ee-439b-a1af-927b8819a6ae" containerID="45df599ad9e3cf32e1e6d1ce9c92ebc437a64aa46f5bcf98e86ba902a77bcdf1" exitCode=0 Feb 02 22:58:16 crc kubenswrapper[4755]: I0202 22:58:16.633012 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a4a28287-a8ee-439b-a1af-927b8819a6ae","Type":"ContainerDied","Data":"45df599ad9e3cf32e1e6d1ce9c92ebc437a64aa46f5bcf98e86ba902a77bcdf1"} Feb 02 22:58:16 crc kubenswrapper[4755]: I0202 22:58:16.866814 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9"] Feb 02 22:58:16 crc kubenswrapper[4755]: E0202 22:58:16.867244 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a88f43d2-9cf6-43a0-b2bd-f945f279eea2" containerName="init" Feb 02 22:58:16 crc kubenswrapper[4755]: I0202 22:58:16.867260 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a88f43d2-9cf6-43a0-b2bd-f945f279eea2" containerName="init" Feb 02 22:58:16 crc kubenswrapper[4755]: E0202 22:58:16.867278 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a88f43d2-9cf6-43a0-b2bd-f945f279eea2" containerName="dnsmasq-dns" Feb 02 22:58:16 crc kubenswrapper[4755]: I0202 22:58:16.867285 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="a88f43d2-9cf6-43a0-b2bd-f945f279eea2" containerName="dnsmasq-dns" Feb 02 22:58:16 crc kubenswrapper[4755]: E0202 22:58:16.867299 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08987794-2101-4138-8efe-915de10da5be" containerName="dnsmasq-dns" Feb 02 22:58:16 crc kubenswrapper[4755]: I0202 22:58:16.867306 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="08987794-2101-4138-8efe-915de10da5be" containerName="dnsmasq-dns" Feb 02 22:58:16 crc kubenswrapper[4755]: E0202 22:58:16.867315 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08987794-2101-4138-8efe-915de10da5be" containerName="init" Feb 02 22:58:16 crc kubenswrapper[4755]: I0202 22:58:16.867320 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="08987794-2101-4138-8efe-915de10da5be" containerName="init" Feb 02 22:58:16 crc kubenswrapper[4755]: E0202 22:58:16.867332 
4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d72a2c1-c4d9-4f54-8ca1-aa44762ae943" containerName="extract-content" Feb 02 22:58:16 crc kubenswrapper[4755]: I0202 22:58:16.867337 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d72a2c1-c4d9-4f54-8ca1-aa44762ae943" containerName="extract-content" Feb 02 22:58:16 crc kubenswrapper[4755]: E0202 22:58:16.867349 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d72a2c1-c4d9-4f54-8ca1-aa44762ae943" containerName="registry-server" Feb 02 22:58:16 crc kubenswrapper[4755]: I0202 22:58:16.867354 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d72a2c1-c4d9-4f54-8ca1-aa44762ae943" containerName="registry-server" Feb 02 22:58:16 crc kubenswrapper[4755]: E0202 22:58:16.867370 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d72a2c1-c4d9-4f54-8ca1-aa44762ae943" containerName="extract-utilities" Feb 02 22:58:16 crc kubenswrapper[4755]: I0202 22:58:16.867376 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d72a2c1-c4d9-4f54-8ca1-aa44762ae943" containerName="extract-utilities" Feb 02 22:58:16 crc kubenswrapper[4755]: I0202 22:58:16.867552 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="a88f43d2-9cf6-43a0-b2bd-f945f279eea2" containerName="dnsmasq-dns" Feb 02 22:58:16 crc kubenswrapper[4755]: I0202 22:58:16.867579 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="08987794-2101-4138-8efe-915de10da5be" containerName="dnsmasq-dns" Feb 02 22:58:16 crc kubenswrapper[4755]: I0202 22:58:16.867598 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d72a2c1-c4d9-4f54-8ca1-aa44762ae943" containerName="registry-server" Feb 02 22:58:16 crc kubenswrapper[4755]: I0202 22:58:16.868332 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9" Feb 02 22:58:16 crc kubenswrapper[4755]: I0202 22:58:16.872336 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 22:58:16 crc kubenswrapper[4755]: I0202 22:58:16.872508 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 22:58:16 crc kubenswrapper[4755]: I0202 22:58:16.872635 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-n24hl" Feb 02 22:58:16 crc kubenswrapper[4755]: I0202 22:58:16.874409 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 22:58:16 crc kubenswrapper[4755]: I0202 22:58:16.886131 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9"] Feb 02 22:58:16 crc kubenswrapper[4755]: I0202 22:58:16.954806 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9\" (UID: \"ebfacc8a-aef0-4478-bd5f-5285e424fa0b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9" Feb 02 22:58:16 crc kubenswrapper[4755]: I0202 22:58:16.954854 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9\" (UID: \"ebfacc8a-aef0-4478-bd5f-5285e424fa0b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9" Feb 02 22:58:16 crc kubenswrapper[4755]: I0202 22:58:16.954947 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9\" (UID: \"ebfacc8a-aef0-4478-bd5f-5285e424fa0b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9" Feb 02 22:58:16 crc kubenswrapper[4755]: I0202 22:58:16.954964 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxvwx\" (UniqueName: \"kubernetes.io/projected/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-kube-api-access-kxvwx\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9\" (UID: \"ebfacc8a-aef0-4478-bd5f-5285e424fa0b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9" Feb 02 22:58:17 crc kubenswrapper[4755]: I0202 22:58:17.056966 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9\" (UID: \"ebfacc8a-aef0-4478-bd5f-5285e424fa0b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9" Feb 02 22:58:17 crc kubenswrapper[4755]: I0202 22:58:17.057234 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxvwx\" (UniqueName: 
\"kubernetes.io/projected/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-kube-api-access-kxvwx\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9\" (UID: \"ebfacc8a-aef0-4478-bd5f-5285e424fa0b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9" Feb 02 22:58:17 crc kubenswrapper[4755]: I0202 22:58:17.057442 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9\" (UID: \"ebfacc8a-aef0-4478-bd5f-5285e424fa0b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9" Feb 02 22:58:17 crc kubenswrapper[4755]: I0202 22:58:17.057539 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9\" (UID: \"ebfacc8a-aef0-4478-bd5f-5285e424fa0b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9" Feb 02 22:58:17 crc kubenswrapper[4755]: I0202 22:58:17.062581 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9\" (UID: \"ebfacc8a-aef0-4478-bd5f-5285e424fa0b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9" Feb 02 22:58:17 crc kubenswrapper[4755]: I0202 22:58:17.062683 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9\" (UID: \"ebfacc8a-aef0-4478-bd5f-5285e424fa0b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9" Feb 02 22:58:17 crc kubenswrapper[4755]: I0202 22:58:17.063221 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9\" (UID: \"ebfacc8a-aef0-4478-bd5f-5285e424fa0b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9" Feb 02 22:58:17 crc kubenswrapper[4755]: I0202 22:58:17.081714 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxvwx\" (UniqueName: \"kubernetes.io/projected/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-kube-api-access-kxvwx\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9\" (UID: \"ebfacc8a-aef0-4478-bd5f-5285e424fa0b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9" Feb 02 22:58:17 crc kubenswrapper[4755]: I0202 22:58:17.239643 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9"
Feb 02 22:58:17 crc kubenswrapper[4755]: I0202 22:58:17.646140 4755 generic.go:334] "Generic (PLEG): container finished" podID="e09fc49d-8b5d-4775-a549-b5ca23d3d13e" containerID="5dca39ef4efee90e9812ad358310c9fbb5f1369cd7a3168be1065845bb580261" exitCode=0
Feb 02 22:58:17 crc kubenswrapper[4755]: I0202 22:58:17.646361 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e09fc49d-8b5d-4775-a549-b5ca23d3d13e","Type":"ContainerDied","Data":"5dca39ef4efee90e9812ad358310c9fbb5f1369cd7a3168be1065845bb580261"}
Feb 02 22:58:17 crc kubenswrapper[4755]: I0202 22:58:17.648894 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a4a28287-a8ee-439b-a1af-927b8819a6ae","Type":"ContainerStarted","Data":"b6e6c0a86957c1d6153ce484fba53dbd210b6838f69c17b3fd43baf31a6a0a52"}
Feb 02 22:58:17 crc kubenswrapper[4755]: I0202 22:58:17.649096 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Feb 02 22:58:17 crc kubenswrapper[4755]: I0202 22:58:17.727607 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.727584606 podStartE2EDuration="36.727584606s" podCreationTimestamp="2026-02-02 22:57:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:58:17.726489715 +0000 UTC m=+1453.417710051" watchObservedRunningTime="2026-02-02 22:58:17.727584606 +0000 UTC m=+1453.418804942"
Feb 02 22:58:17 crc kubenswrapper[4755]: I0202 22:58:17.844907 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9"]
Feb 02 22:58:17 crc kubenswrapper[4755]: W0202 22:58:17.845681 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podebfacc8a_aef0_4478_bd5f_5285e424fa0b.slice/crio-d95ce6a20bd8eca7f8f8557a104a2cd3d780f55d8f16b6088c8bfaa8e7966043 WatchSource:0}: Error finding container d95ce6a20bd8eca7f8f8557a104a2cd3d780f55d8f16b6088c8bfaa8e7966043: Status 404 returned error can't find the container with id d95ce6a20bd8eca7f8f8557a104a2cd3d780f55d8f16b6088c8bfaa8e7966043
Feb 02 22:58:18 crc kubenswrapper[4755]: I0202 22:58:18.670109 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e09fc49d-8b5d-4775-a549-b5ca23d3d13e","Type":"ContainerStarted","Data":"0a0224ed9c5b85f9a00e244a4a511324761b233d2bb6f9049f854bb06b5fcebb"}
Feb 02 22:58:18 crc kubenswrapper[4755]: I0202 22:58:18.671352 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Feb 02 22:58:18 crc kubenswrapper[4755]: I0202 22:58:18.674505 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9" event={"ID":"ebfacc8a-aef0-4478-bd5f-5285e424fa0b","Type":"ContainerStarted","Data":"d95ce6a20bd8eca7f8f8557a104a2cd3d780f55d8f16b6088c8bfaa8e7966043"}
Feb 02 22:58:18 crc kubenswrapper[4755]: I0202 22:58:18.708397 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.708372047 podStartE2EDuration="36.708372047s" podCreationTimestamp="2026-02-02 22:57:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:58:18.700907108 +0000 UTC m=+1454.392127444" watchObservedRunningTime="2026-02-02 22:58:18.708372047 +0000 UTC m=+1454.399592373"
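[annotation] The pod_startup_latency_tracker.go:104 lines above record two figures: podStartSLOduration, which per the kubelet's pod-start SLI excludes image-pull time, and podStartE2EDuration, measured from podCreationTimestamp to the observed running time. For both rabbitmq pods the pull timestamps are Go's zero time (0001-01-01), suggesting no pull was recorded, which is why the two figures coincide. A minimal sketch of extracting the quoted end-to-end duration; the fragment is copied from the rabbitmq-cell1-server-0 entry above:

    // startup_slo.go - parse the E2E duration out of a startup-latency line.
    package main

    import (
        "fmt"
        "regexp"
        "time"
    )

    func main() {
        line := `podStartSLOduration=36.708372047 podStartE2EDuration="36.708372047s"`
        m := regexp.MustCompile(`podStartE2EDuration="([^"]+)"`).FindStringSubmatch(line)
        if m == nil {
            panic("no match")
        }
        d, err := time.ParseDuration(m[1]) // the E2E figure is a Go duration string
        if err != nil {
            panic(err)
        }
        fmt.Println("end-to-end startup:", d.Round(time.Millisecond)) // 36.708s
    }

Note the asymmetry in the log format itself: podStartSLOduration is printed as a bare float of seconds, while podStartE2EDuration is a quoted duration string.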
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 22:58:18.700907108 +0000 UTC m=+1454.392127444" watchObservedRunningTime="2026-02-02 22:58:18.708372047 +0000 UTC m=+1454.399592373" Feb 02 22:58:21 crc kubenswrapper[4755]: E0202 22:58:21.452145 4755 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08987794_2101_4138_8efe_915de10da5be.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08987794_2101_4138_8efe_915de10da5be.slice/crio-288cbc3fbed4422f71c08c547314b913648ce947b10ec9b23aee491ac742fdfa\": RecentStats: unable to find data in memory cache]" Feb 02 22:58:22 crc kubenswrapper[4755]: I0202 22:58:22.615482 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-api-0" Feb 02 22:58:27 crc kubenswrapper[4755]: I0202 22:58:27.813407 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9" event={"ID":"ebfacc8a-aef0-4478-bd5f-5285e424fa0b","Type":"ContainerStarted","Data":"4e4bee1e7a364074ca89401b3d9ac53a8eefd9d9c36a796ca208b762295e50aa"} Feb 02 22:58:27 crc kubenswrapper[4755]: I0202 22:58:27.850353 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9" podStartSLOduration=2.728189334 podStartE2EDuration="11.850328299s" podCreationTimestamp="2026-02-02 22:58:16 +0000 UTC" firstStartedPulling="2026-02-02 22:58:17.847811314 +0000 UTC m=+1453.539031640" lastFinishedPulling="2026-02-02 22:58:26.969950239 +0000 UTC m=+1462.661170605" observedRunningTime="2026-02-02 22:58:27.837000945 +0000 UTC m=+1463.528221291" watchObservedRunningTime="2026-02-02 22:58:27.850328299 +0000 UTC m=+1463.541548635" Feb 02 22:58:31 crc kubenswrapper[4755]: I0202 22:58:31.489897 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Feb 02 22:58:31 crc kubenswrapper[4755]: E0202 22:58:31.797684 4755 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08987794_2101_4138_8efe_915de10da5be.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08987794_2101_4138_8efe_915de10da5be.slice/crio-288cbc3fbed4422f71c08c547314b913648ce947b10ec9b23aee491ac742fdfa\": RecentStats: unable to find data in memory cache]" Feb 02 22:58:32 crc kubenswrapper[4755]: I0202 22:58:32.547208 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Feb 02 22:58:38 crc kubenswrapper[4755]: I0202 22:58:38.942912 4755 generic.go:334] "Generic (PLEG): container finished" podID="ebfacc8a-aef0-4478-bd5f-5285e424fa0b" containerID="4e4bee1e7a364074ca89401b3d9ac53a8eefd9d9c36a796ca208b762295e50aa" exitCode=0 Feb 02 22:58:38 crc kubenswrapper[4755]: I0202 22:58:38.943168 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9" event={"ID":"ebfacc8a-aef0-4478-bd5f-5285e424fa0b","Type":"ContainerDied","Data":"4e4bee1e7a364074ca89401b3d9ac53a8eefd9d9c36a796ca208b762295e50aa"} Feb 02 22:58:40 crc 
kubenswrapper[4755]: I0202 22:58:40.474399 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9" Feb 02 22:58:40 crc kubenswrapper[4755]: I0202 22:58:40.519914 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-ssh-key-openstack-edpm-ipam\") pod \"ebfacc8a-aef0-4478-bd5f-5285e424fa0b\" (UID: \"ebfacc8a-aef0-4478-bd5f-5285e424fa0b\") " Feb 02 22:58:40 crc kubenswrapper[4755]: I0202 22:58:40.520030 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-repo-setup-combined-ca-bundle\") pod \"ebfacc8a-aef0-4478-bd5f-5285e424fa0b\" (UID: \"ebfacc8a-aef0-4478-bd5f-5285e424fa0b\") " Feb 02 22:58:40 crc kubenswrapper[4755]: I0202 22:58:40.520115 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-inventory\") pod \"ebfacc8a-aef0-4478-bd5f-5285e424fa0b\" (UID: \"ebfacc8a-aef0-4478-bd5f-5285e424fa0b\") " Feb 02 22:58:40 crc kubenswrapper[4755]: I0202 22:58:40.520303 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxvwx\" (UniqueName: \"kubernetes.io/projected/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-kube-api-access-kxvwx\") pod \"ebfacc8a-aef0-4478-bd5f-5285e424fa0b\" (UID: \"ebfacc8a-aef0-4478-bd5f-5285e424fa0b\") " Feb 02 22:58:40 crc kubenswrapper[4755]: I0202 22:58:40.539115 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "ebfacc8a-aef0-4478-bd5f-5285e424fa0b" (UID: "ebfacc8a-aef0-4478-bd5f-5285e424fa0b"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:58:40 crc kubenswrapper[4755]: I0202 22:58:40.539149 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-kube-api-access-kxvwx" (OuterVolumeSpecName: "kube-api-access-kxvwx") pod "ebfacc8a-aef0-4478-bd5f-5285e424fa0b" (UID: "ebfacc8a-aef0-4478-bd5f-5285e424fa0b"). InnerVolumeSpecName "kube-api-access-kxvwx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:58:40 crc kubenswrapper[4755]: I0202 22:58:40.561608 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "ebfacc8a-aef0-4478-bd5f-5285e424fa0b" (UID: "ebfacc8a-aef0-4478-bd5f-5285e424fa0b"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:58:40 crc kubenswrapper[4755]: I0202 22:58:40.561857 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-inventory" (OuterVolumeSpecName: "inventory") pod "ebfacc8a-aef0-4478-bd5f-5285e424fa0b" (UID: "ebfacc8a-aef0-4478-bd5f-5285e424fa0b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:58:40 crc kubenswrapper[4755]: I0202 22:58:40.623145 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 22:58:40 crc kubenswrapper[4755]: I0202 22:58:40.623196 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxvwx\" (UniqueName: \"kubernetes.io/projected/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-kube-api-access-kxvwx\") on node \"crc\" DevicePath \"\"" Feb 02 22:58:40 crc kubenswrapper[4755]: I0202 22:58:40.623221 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 22:58:40 crc kubenswrapper[4755]: I0202 22:58:40.623239 4755 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebfacc8a-aef0-4478-bd5f-5285e424fa0b-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 22:58:40 crc kubenswrapper[4755]: I0202 22:58:40.975076 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9" event={"ID":"ebfacc8a-aef0-4478-bd5f-5285e424fa0b","Type":"ContainerDied","Data":"d95ce6a20bd8eca7f8f8557a104a2cd3d780f55d8f16b6088c8bfaa8e7966043"} Feb 02 22:58:40 crc kubenswrapper[4755]: I0202 22:58:40.975115 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d95ce6a20bd8eca7f8f8557a104a2cd3d780f55d8f16b6088c8bfaa8e7966043" Feb 02 22:58:40 crc kubenswrapper[4755]: I0202 22:58:40.975152 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9" Feb 02 22:58:41 crc kubenswrapper[4755]: I0202 22:58:41.053816 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-cdlnd"] Feb 02 22:58:41 crc kubenswrapper[4755]: E0202 22:58:41.054620 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebfacc8a-aef0-4478-bd5f-5285e424fa0b" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Feb 02 22:58:41 crc kubenswrapper[4755]: I0202 22:58:41.054746 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebfacc8a-aef0-4478-bd5f-5285e424fa0b" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Feb 02 22:58:41 crc kubenswrapper[4755]: I0202 22:58:41.055131 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebfacc8a-aef0-4478-bd5f-5285e424fa0b" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Feb 02 22:58:41 crc kubenswrapper[4755]: I0202 22:58:41.056122 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cdlnd" Feb 02 22:58:41 crc kubenswrapper[4755]: I0202 22:58:41.059475 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 22:58:41 crc kubenswrapper[4755]: I0202 22:58:41.059639 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 22:58:41 crc kubenswrapper[4755]: I0202 22:58:41.060238 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-n24hl" Feb 02 22:58:41 crc kubenswrapper[4755]: I0202 22:58:41.068178 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 22:58:41 crc kubenswrapper[4755]: I0202 22:58:41.097808 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-cdlnd"] Feb 02 22:58:41 crc kubenswrapper[4755]: I0202 22:58:41.132183 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bb72c052-d5a9-4448-af05-1396fd408383-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-cdlnd\" (UID: \"bb72c052-d5a9-4448-af05-1396fd408383\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cdlnd" Feb 02 22:58:41 crc kubenswrapper[4755]: I0202 22:58:41.132484 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zntw6\" (UniqueName: \"kubernetes.io/projected/bb72c052-d5a9-4448-af05-1396fd408383-kube-api-access-zntw6\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-cdlnd\" (UID: \"bb72c052-d5a9-4448-af05-1396fd408383\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cdlnd" Feb 02 22:58:41 crc kubenswrapper[4755]: I0202 22:58:41.132601 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bb72c052-d5a9-4448-af05-1396fd408383-ssh-key-openstack-edpm-ipam\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-cdlnd\" (UID: \"bb72c052-d5a9-4448-af05-1396fd408383\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cdlnd" Feb 02 22:58:41 crc kubenswrapper[4755]: I0202 22:58:41.234677 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bb72c052-d5a9-4448-af05-1396fd408383-ssh-key-openstack-edpm-ipam\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-cdlnd\" (UID: \"bb72c052-d5a9-4448-af05-1396fd408383\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cdlnd" Feb 02 22:58:41 crc kubenswrapper[4755]: I0202 22:58:41.234884 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bb72c052-d5a9-4448-af05-1396fd408383-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-cdlnd\" (UID: \"bb72c052-d5a9-4448-af05-1396fd408383\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cdlnd" Feb 02 22:58:41 crc kubenswrapper[4755]: I0202 22:58:41.234980 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zntw6\" (UniqueName: \"kubernetes.io/projected/bb72c052-d5a9-4448-af05-1396fd408383-kube-api-access-zntw6\") pod 
\"redhat-edpm-deployment-openstack-edpm-ipam-cdlnd\" (UID: \"bb72c052-d5a9-4448-af05-1396fd408383\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cdlnd" Feb 02 22:58:41 crc kubenswrapper[4755]: I0202 22:58:41.240096 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bb72c052-d5a9-4448-af05-1396fd408383-ssh-key-openstack-edpm-ipam\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-cdlnd\" (UID: \"bb72c052-d5a9-4448-af05-1396fd408383\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cdlnd" Feb 02 22:58:41 crc kubenswrapper[4755]: I0202 22:58:41.241942 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bb72c052-d5a9-4448-af05-1396fd408383-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-cdlnd\" (UID: \"bb72c052-d5a9-4448-af05-1396fd408383\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cdlnd" Feb 02 22:58:41 crc kubenswrapper[4755]: I0202 22:58:41.250211 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zntw6\" (UniqueName: \"kubernetes.io/projected/bb72c052-d5a9-4448-af05-1396fd408383-kube-api-access-zntw6\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-cdlnd\" (UID: \"bb72c052-d5a9-4448-af05-1396fd408383\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cdlnd" Feb 02 22:58:41 crc kubenswrapper[4755]: I0202 22:58:41.394259 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cdlnd" Feb 02 22:58:42 crc kubenswrapper[4755]: W0202 22:58:42.061413 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbb72c052_d5a9_4448_af05_1396fd408383.slice/crio-bb875b96789cbadc339fea549f6d7c555c0716874aa1136e8b27223b6305fc8c WatchSource:0}: Error finding container bb875b96789cbadc339fea549f6d7c555c0716874aa1136e8b27223b6305fc8c: Status 404 returned error can't find the container with id bb875b96789cbadc339fea549f6d7c555c0716874aa1136e8b27223b6305fc8c Feb 02 22:58:42 crc kubenswrapper[4755]: I0202 22:58:42.075668 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-cdlnd"] Feb 02 22:58:42 crc kubenswrapper[4755]: E0202 22:58:42.110121 4755 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08987794_2101_4138_8efe_915de10da5be.slice/crio-288cbc3fbed4422f71c08c547314b913648ce947b10ec9b23aee491ac742fdfa\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08987794_2101_4138_8efe_915de10da5be.slice\": RecentStats: unable to find data in memory cache]" Feb 02 22:58:42 crc kubenswrapper[4755]: I0202 22:58:42.998120 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cdlnd" event={"ID":"bb72c052-d5a9-4448-af05-1396fd408383","Type":"ContainerStarted","Data":"46152b25f1c173b27edf2cedf9b8b6b0b492f2ec1d3098374451255df891abbd"} Feb 02 22:58:42 crc kubenswrapper[4755]: I0202 22:58:42.998811 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cdlnd" 
event={"ID":"bb72c052-d5a9-4448-af05-1396fd408383","Type":"ContainerStarted","Data":"bb875b96789cbadc339fea549f6d7c555c0716874aa1136e8b27223b6305fc8c"} Feb 02 22:58:43 crc kubenswrapper[4755]: I0202 22:58:43.028863 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cdlnd" podStartSLOduration=1.567020407 podStartE2EDuration="2.02883163s" podCreationTimestamp="2026-02-02 22:58:41 +0000 UTC" firstStartedPulling="2026-02-02 22:58:42.064272175 +0000 UTC m=+1477.755492511" lastFinishedPulling="2026-02-02 22:58:42.526083408 +0000 UTC m=+1478.217303734" observedRunningTime="2026-02-02 22:58:43.018926222 +0000 UTC m=+1478.710146558" watchObservedRunningTime="2026-02-02 22:58:43.02883163 +0000 UTC m=+1478.720051996" Feb 02 22:58:46 crc kubenswrapper[4755]: I0202 22:58:46.041460 4755 generic.go:334] "Generic (PLEG): container finished" podID="bb72c052-d5a9-4448-af05-1396fd408383" containerID="46152b25f1c173b27edf2cedf9b8b6b0b492f2ec1d3098374451255df891abbd" exitCode=0 Feb 02 22:58:46 crc kubenswrapper[4755]: I0202 22:58:46.041546 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cdlnd" event={"ID":"bb72c052-d5a9-4448-af05-1396fd408383","Type":"ContainerDied","Data":"46152b25f1c173b27edf2cedf9b8b6b0b492f2ec1d3098374451255df891abbd"} Feb 02 22:58:47 crc kubenswrapper[4755]: I0202 22:58:47.618249 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cdlnd" Feb 02 22:58:47 crc kubenswrapper[4755]: I0202 22:58:47.673545 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bb72c052-d5a9-4448-af05-1396fd408383-ssh-key-openstack-edpm-ipam\") pod \"bb72c052-d5a9-4448-af05-1396fd408383\" (UID: \"bb72c052-d5a9-4448-af05-1396fd408383\") " Feb 02 22:58:47 crc kubenswrapper[4755]: I0202 22:58:47.673684 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zntw6\" (UniqueName: \"kubernetes.io/projected/bb72c052-d5a9-4448-af05-1396fd408383-kube-api-access-zntw6\") pod \"bb72c052-d5a9-4448-af05-1396fd408383\" (UID: \"bb72c052-d5a9-4448-af05-1396fd408383\") " Feb 02 22:58:47 crc kubenswrapper[4755]: I0202 22:58:47.673842 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bb72c052-d5a9-4448-af05-1396fd408383-inventory\") pod \"bb72c052-d5a9-4448-af05-1396fd408383\" (UID: \"bb72c052-d5a9-4448-af05-1396fd408383\") " Feb 02 22:58:47 crc kubenswrapper[4755]: I0202 22:58:47.681019 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb72c052-d5a9-4448-af05-1396fd408383-kube-api-access-zntw6" (OuterVolumeSpecName: "kube-api-access-zntw6") pod "bb72c052-d5a9-4448-af05-1396fd408383" (UID: "bb72c052-d5a9-4448-af05-1396fd408383"). InnerVolumeSpecName "kube-api-access-zntw6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:58:47 crc kubenswrapper[4755]: I0202 22:58:47.707596 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb72c052-d5a9-4448-af05-1396fd408383-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "bb72c052-d5a9-4448-af05-1396fd408383" (UID: "bb72c052-d5a9-4448-af05-1396fd408383"). 
InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:58:47 crc kubenswrapper[4755]: I0202 22:58:47.720243 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb72c052-d5a9-4448-af05-1396fd408383-inventory" (OuterVolumeSpecName: "inventory") pod "bb72c052-d5a9-4448-af05-1396fd408383" (UID: "bb72c052-d5a9-4448-af05-1396fd408383"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 22:58:47 crc kubenswrapper[4755]: I0202 22:58:47.776867 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bb72c052-d5a9-4448-af05-1396fd408383-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 22:58:47 crc kubenswrapper[4755]: I0202 22:58:47.776905 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bb72c052-d5a9-4448-af05-1396fd408383-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 22:58:47 crc kubenswrapper[4755]: I0202 22:58:47.776921 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zntw6\" (UniqueName: \"kubernetes.io/projected/bb72c052-d5a9-4448-af05-1396fd408383-kube-api-access-zntw6\") on node \"crc\" DevicePath \"\"" Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.066212 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cdlnd" event={"ID":"bb72c052-d5a9-4448-af05-1396fd408383","Type":"ContainerDied","Data":"bb875b96789cbadc339fea549f6d7c555c0716874aa1136e8b27223b6305fc8c"} Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.066254 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb875b96789cbadc339fea549f6d7c555c0716874aa1136e8b27223b6305fc8c" Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.066273 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-cdlnd" Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.147576 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj"] Feb 02 22:58:48 crc kubenswrapper[4755]: E0202 22:58:48.148271 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb72c052-d5a9-4448-af05-1396fd408383" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.148307 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb72c052-d5a9-4448-af05-1396fd408383" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.148628 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb72c052-d5a9-4448-af05-1396fd408383" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.150028 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj" Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.162902 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.163289 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.163362 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.163388 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-n24hl" Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.185967 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj"] Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.186367 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3501887a-bec4-43bd-b0ed-1daf65ae1331-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj\" (UID: \"3501887a-bec4-43bd-b0ed-1daf65ae1331\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj" Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.186430 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3501887a-bec4-43bd-b0ed-1daf65ae1331-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj\" (UID: \"3501887a-bec4-43bd-b0ed-1daf65ae1331\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj" Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.186479 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cgls\" (UniqueName: \"kubernetes.io/projected/3501887a-bec4-43bd-b0ed-1daf65ae1331-kube-api-access-5cgls\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj\" (UID: \"3501887a-bec4-43bd-b0ed-1daf65ae1331\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj" Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.186710 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/3501887a-bec4-43bd-b0ed-1daf65ae1331-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj\" (UID: \"3501887a-bec4-43bd-b0ed-1daf65ae1331\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj" Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.287923 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3501887a-bec4-43bd-b0ed-1daf65ae1331-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj\" (UID: \"3501887a-bec4-43bd-b0ed-1daf65ae1331\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj" Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.287980 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/3501887a-bec4-43bd-b0ed-1daf65ae1331-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj\" (UID: \"3501887a-bec4-43bd-b0ed-1daf65ae1331\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj" Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.288029 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cgls\" (UniqueName: \"kubernetes.io/projected/3501887a-bec4-43bd-b0ed-1daf65ae1331-kube-api-access-5cgls\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj\" (UID: \"3501887a-bec4-43bd-b0ed-1daf65ae1331\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj" Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.288085 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/3501887a-bec4-43bd-b0ed-1daf65ae1331-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj\" (UID: \"3501887a-bec4-43bd-b0ed-1daf65ae1331\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj" Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.292969 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/3501887a-bec4-43bd-b0ed-1daf65ae1331-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj\" (UID: \"3501887a-bec4-43bd-b0ed-1daf65ae1331\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj" Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.297570 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3501887a-bec4-43bd-b0ed-1daf65ae1331-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj\" (UID: \"3501887a-bec4-43bd-b0ed-1daf65ae1331\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj" Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.297993 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3501887a-bec4-43bd-b0ed-1daf65ae1331-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj\" (UID: \"3501887a-bec4-43bd-b0ed-1daf65ae1331\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj" Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.310874 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cgls\" (UniqueName: \"kubernetes.io/projected/3501887a-bec4-43bd-b0ed-1daf65ae1331-kube-api-access-5cgls\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj\" (UID: \"3501887a-bec4-43bd-b0ed-1daf65ae1331\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj" Feb 02 22:58:48 crc kubenswrapper[4755]: I0202 22:58:48.486297 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj"
Feb 02 22:58:49 crc kubenswrapper[4755]: I0202 22:58:49.190341 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj"]
Feb 02 22:58:49 crc kubenswrapper[4755]: W0202 22:58:49.192469 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3501887a_bec4_43bd_b0ed_1daf65ae1331.slice/crio-8ca7c9a034b2ae9052d8a7fa75394cabc6f7b0ece352477544b2ca7f6ce968ab WatchSource:0}: Error finding container 8ca7c9a034b2ae9052d8a7fa75394cabc6f7b0ece352477544b2ca7f6ce968ab: Status 404 returned error can't find the container with id 8ca7c9a034b2ae9052d8a7fa75394cabc6f7b0ece352477544b2ca7f6ce968ab
Feb 02 22:58:50 crc kubenswrapper[4755]: I0202 22:58:50.093562 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj" event={"ID":"3501887a-bec4-43bd-b0ed-1daf65ae1331","Type":"ContainerStarted","Data":"30067130dadae37055fc7d33ae1aaf8d4505991c0179543b799febfa003bdfab"}
Feb 02 22:58:50 crc kubenswrapper[4755]: I0202 22:58:50.093875 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj" event={"ID":"3501887a-bec4-43bd-b0ed-1daf65ae1331","Type":"ContainerStarted","Data":"8ca7c9a034b2ae9052d8a7fa75394cabc6f7b0ece352477544b2ca7f6ce968ab"}
Feb 02 22:58:50 crc kubenswrapper[4755]: I0202 22:58:50.113137 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj" podStartSLOduration=1.722796331 podStartE2EDuration="2.113121386s" podCreationTimestamp="2026-02-02 22:58:48 +0000 UTC" firstStartedPulling="2026-02-02 22:58:49.194924849 +0000 UTC m=+1484.886145175" lastFinishedPulling="2026-02-02 22:58:49.585249904 +0000 UTC m=+1485.276470230" observedRunningTime="2026-02-02 22:58:50.110071111 +0000 UTC m=+1485.801291477" watchObservedRunningTime="2026-02-02 22:58:50.113121386 +0000 UTC m=+1485.804341722"
Feb 02 22:58:52 crc kubenswrapper[4755]: E0202 22:58:52.408987 4755 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08987794_2101_4138_8efe_915de10da5be.slice/crio-288cbc3fbed4422f71c08c547314b913648ce947b10ec9b23aee491ac742fdfa\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08987794_2101_4138_8efe_915de10da5be.slice\": RecentStats: unable to find data in memory cache]"
Feb 02 22:59:02 crc kubenswrapper[4755]: E0202 22:59:02.678701 4755 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08987794_2101_4138_8efe_915de10da5be.slice/crio-288cbc3fbed4422f71c08c547314b913648ce947b10ec9b23aee491ac742fdfa\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08987794_2101_4138_8efe_915de10da5be.slice\": RecentStats: unable to find data in memory cache]"
Feb 02 22:59:08 crc kubenswrapper[4755]: I0202 22:59:08.281095 4755 scope.go:117] "RemoveContainer" containerID="441f708504e129ad598c3c3fe1dc01da921165d12d066f8b0330b996f14943cf"
Feb 02 22:59:08 crc kubenswrapper[4755]: I0202 22:59:08.320892 4755 scope.go:117] "RemoveContainer" containerID="899197e3c492a122a57091f6f598739acb6b588f83d939fe40d80667db363b0c"
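[annotation] The scope.go:117 "RemoveContainer" entries are the kubelet's container garbage collector pruning exited containers. The recurring cadvisor_stats_provider.go:516 "Partial failure" errors, here and earlier in this section, all name the cgroup slice of pod 08987794… (the dnsmasq-dns pod deleted at 22:58:05), which reads as stale-cgroup noise: cadvisor apparently keeps the slice in view for a while after deletion, so stats lookups miss its memory cache until the entry ages out. A sketch that tallies these error lines per pod slice; the kubelet.log filename is an assumption:

    // stale_stats.go - count cadvisor "unable to find data in memory cache" lines per pod slice.
    package main

    import (
        "bufio"
        "fmt"
        "os"
        "regexp"
        "strings"
    )

    func main() {
        podSlice := regexp.MustCompile(`kubepods-besteffort-pod[0-9a-f_]+\.slice`)
        counts := map[string]int{}
        f, err := os.Open("kubelet.log") // assumed filename
        if err != nil {
            panic(err)
        }
        defer f.Close()
        sc := bufio.NewScanner(f)
        sc.Buffer(make([]byte, 0, 1<<20), 1<<20)
        for sc.Scan() {
            line := sc.Text()
            if !strings.Contains(line, "unable to find data in memory cache") {
                continue
            }
            if m := podSlice.FindString(line); m != "" {
                counts[m]++ // one tally per log line, not per bracketed failure
            }
        }
        for slice, n := range counts {
            fmt.Printf("%4d %s\n", n, slice)
        }
    }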
"RemoveContainer" containerID="899197e3c492a122a57091f6f598739acb6b588f83d939fe40d80667db363b0c" Feb 02 22:59:23 crc kubenswrapper[4755]: I0202 22:59:23.390708 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 22:59:23 crc kubenswrapper[4755]: I0202 22:59:23.391496 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 22:59:26 crc kubenswrapper[4755]: I0202 22:59:26.619002 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-k8vgb"] Feb 02 22:59:26 crc kubenswrapper[4755]: I0202 22:59:26.625572 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k8vgb" Feb 02 22:59:26 crc kubenswrapper[4755]: I0202 22:59:26.646123 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-k8vgb"] Feb 02 22:59:26 crc kubenswrapper[4755]: I0202 22:59:26.725102 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52f7t\" (UniqueName: \"kubernetes.io/projected/1f3f63c1-55c8-41fc-b08d-0bea998c6bfd-kube-api-access-52f7t\") pod \"community-operators-k8vgb\" (UID: \"1f3f63c1-55c8-41fc-b08d-0bea998c6bfd\") " pod="openshift-marketplace/community-operators-k8vgb" Feb 02 22:59:26 crc kubenswrapper[4755]: I0202 22:59:26.725164 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f3f63c1-55c8-41fc-b08d-0bea998c6bfd-catalog-content\") pod \"community-operators-k8vgb\" (UID: \"1f3f63c1-55c8-41fc-b08d-0bea998c6bfd\") " pod="openshift-marketplace/community-operators-k8vgb" Feb 02 22:59:26 crc kubenswrapper[4755]: I0202 22:59:26.725258 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f3f63c1-55c8-41fc-b08d-0bea998c6bfd-utilities\") pod \"community-operators-k8vgb\" (UID: \"1f3f63c1-55c8-41fc-b08d-0bea998c6bfd\") " pod="openshift-marketplace/community-operators-k8vgb" Feb 02 22:59:26 crc kubenswrapper[4755]: I0202 22:59:26.827178 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52f7t\" (UniqueName: \"kubernetes.io/projected/1f3f63c1-55c8-41fc-b08d-0bea998c6bfd-kube-api-access-52f7t\") pod \"community-operators-k8vgb\" (UID: \"1f3f63c1-55c8-41fc-b08d-0bea998c6bfd\") " pod="openshift-marketplace/community-operators-k8vgb" Feb 02 22:59:26 crc kubenswrapper[4755]: I0202 22:59:26.827510 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f3f63c1-55c8-41fc-b08d-0bea998c6bfd-catalog-content\") pod \"community-operators-k8vgb\" (UID: \"1f3f63c1-55c8-41fc-b08d-0bea998c6bfd\") " pod="openshift-marketplace/community-operators-k8vgb" Feb 02 22:59:26 crc kubenswrapper[4755]: I0202 22:59:26.827592 4755 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f3f63c1-55c8-41fc-b08d-0bea998c6bfd-utilities\") pod \"community-operators-k8vgb\" (UID: \"1f3f63c1-55c8-41fc-b08d-0bea998c6bfd\") " pod="openshift-marketplace/community-operators-k8vgb" Feb 02 22:59:26 crc kubenswrapper[4755]: I0202 22:59:26.828079 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f3f63c1-55c8-41fc-b08d-0bea998c6bfd-utilities\") pod \"community-operators-k8vgb\" (UID: \"1f3f63c1-55c8-41fc-b08d-0bea998c6bfd\") " pod="openshift-marketplace/community-operators-k8vgb" Feb 02 22:59:26 crc kubenswrapper[4755]: I0202 22:59:26.828585 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f3f63c1-55c8-41fc-b08d-0bea998c6bfd-catalog-content\") pod \"community-operators-k8vgb\" (UID: \"1f3f63c1-55c8-41fc-b08d-0bea998c6bfd\") " pod="openshift-marketplace/community-operators-k8vgb" Feb 02 22:59:26 crc kubenswrapper[4755]: I0202 22:59:26.846250 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52f7t\" (UniqueName: \"kubernetes.io/projected/1f3f63c1-55c8-41fc-b08d-0bea998c6bfd-kube-api-access-52f7t\") pod \"community-operators-k8vgb\" (UID: \"1f3f63c1-55c8-41fc-b08d-0bea998c6bfd\") " pod="openshift-marketplace/community-operators-k8vgb" Feb 02 22:59:26 crc kubenswrapper[4755]: I0202 22:59:26.961369 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k8vgb" Feb 02 22:59:28 crc kubenswrapper[4755]: I0202 22:59:28.366642 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-k8vgb"] Feb 02 22:59:28 crc kubenswrapper[4755]: I0202 22:59:28.597210 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8vgb" event={"ID":"1f3f63c1-55c8-41fc-b08d-0bea998c6bfd","Type":"ContainerStarted","Data":"e4a31442f61f194df9186884a7c08e6f4e2f9c91e9431e84c9894a81fdfc3b5f"} Feb 02 22:59:28 crc kubenswrapper[4755]: I0202 22:59:28.597258 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8vgb" event={"ID":"1f3f63c1-55c8-41fc-b08d-0bea998c6bfd","Type":"ContainerStarted","Data":"56127b4e441512e6b0cdb3694139d947d9d7cdb717f451915de83f801c455576"} Feb 02 22:59:29 crc kubenswrapper[4755]: I0202 22:59:29.612906 4755 generic.go:334] "Generic (PLEG): container finished" podID="1f3f63c1-55c8-41fc-b08d-0bea998c6bfd" containerID="e4a31442f61f194df9186884a7c08e6f4e2f9c91e9431e84c9894a81fdfc3b5f" exitCode=0 Feb 02 22:59:29 crc kubenswrapper[4755]: I0202 22:59:29.613006 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8vgb" event={"ID":"1f3f63c1-55c8-41fc-b08d-0bea998c6bfd","Type":"ContainerDied","Data":"e4a31442f61f194df9186884a7c08e6f4e2f9c91e9431e84c9894a81fdfc3b5f"} Feb 02 22:59:30 crc kubenswrapper[4755]: I0202 22:59:30.625573 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8vgb" event={"ID":"1f3f63c1-55c8-41fc-b08d-0bea998c6bfd","Type":"ContainerStarted","Data":"8a2db8ce0dc765f18149ec258e2b0ae74ac4acc3328b281e167fd33ba1353ed6"} Feb 02 22:59:32 crc kubenswrapper[4755]: I0202 22:59:32.651672 4755 generic.go:334] "Generic (PLEG): container finished" podID="1f3f63c1-55c8-41fc-b08d-0bea998c6bfd" 
containerID="8a2db8ce0dc765f18149ec258e2b0ae74ac4acc3328b281e167fd33ba1353ed6" exitCode=0 Feb 02 22:59:32 crc kubenswrapper[4755]: I0202 22:59:32.651774 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8vgb" event={"ID":"1f3f63c1-55c8-41fc-b08d-0bea998c6bfd","Type":"ContainerDied","Data":"8a2db8ce0dc765f18149ec258e2b0ae74ac4acc3328b281e167fd33ba1353ed6"} Feb 02 22:59:33 crc kubenswrapper[4755]: I0202 22:59:33.664091 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8vgb" event={"ID":"1f3f63c1-55c8-41fc-b08d-0bea998c6bfd","Type":"ContainerStarted","Data":"ffa5167adad759eefbe381c6b454dc20e3ac58b01c3269f3fb5d387006b35284"} Feb 02 22:59:33 crc kubenswrapper[4755]: I0202 22:59:33.718646 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-k8vgb" podStartSLOduration=4.174550238 podStartE2EDuration="7.718627482s" podCreationTimestamp="2026-02-02 22:59:26 +0000 UTC" firstStartedPulling="2026-02-02 22:59:29.61811477 +0000 UTC m=+1525.309335106" lastFinishedPulling="2026-02-02 22:59:33.162191994 +0000 UTC m=+1528.853412350" observedRunningTime="2026-02-02 22:59:33.685166677 +0000 UTC m=+1529.376387023" watchObservedRunningTime="2026-02-02 22:59:33.718627482 +0000 UTC m=+1529.409847808" Feb 02 22:59:36 crc kubenswrapper[4755]: I0202 22:59:36.962499 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-k8vgb" Feb 02 22:59:36 crc kubenswrapper[4755]: I0202 22:59:36.963912 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-k8vgb" Feb 02 22:59:37 crc kubenswrapper[4755]: I0202 22:59:37.037602 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-k8vgb" Feb 02 22:59:39 crc kubenswrapper[4755]: I0202 22:59:39.431501 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9nbmf"] Feb 02 22:59:39 crc kubenswrapper[4755]: I0202 22:59:39.436180 4755 util.go:30] "No sandbox for pod can be found. 
Feb 02 22:59:39 crc kubenswrapper[4755]: I0202 22:59:39.443696 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9nbmf"]
Feb 02 22:59:39 crc kubenswrapper[4755]: I0202 22:59:39.490036 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ca0b945-b7b0-4a88-802c-cb698967faf4-catalog-content\") pod \"certified-operators-9nbmf\" (UID: \"3ca0b945-b7b0-4a88-802c-cb698967faf4\") " pod="openshift-marketplace/certified-operators-9nbmf"
Feb 02 22:59:39 crc kubenswrapper[4755]: I0202 22:59:39.490129 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-629rn\" (UniqueName: \"kubernetes.io/projected/3ca0b945-b7b0-4a88-802c-cb698967faf4-kube-api-access-629rn\") pod \"certified-operators-9nbmf\" (UID: \"3ca0b945-b7b0-4a88-802c-cb698967faf4\") " pod="openshift-marketplace/certified-operators-9nbmf"
Feb 02 22:59:39 crc kubenswrapper[4755]: I0202 22:59:39.490206 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ca0b945-b7b0-4a88-802c-cb698967faf4-utilities\") pod \"certified-operators-9nbmf\" (UID: \"3ca0b945-b7b0-4a88-802c-cb698967faf4\") " pod="openshift-marketplace/certified-operators-9nbmf"
Feb 02 22:59:39 crc kubenswrapper[4755]: I0202 22:59:39.592858 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ca0b945-b7b0-4a88-802c-cb698967faf4-catalog-content\") pod \"certified-operators-9nbmf\" (UID: \"3ca0b945-b7b0-4a88-802c-cb698967faf4\") " pod="openshift-marketplace/certified-operators-9nbmf"
Feb 02 22:59:39 crc kubenswrapper[4755]: I0202 22:59:39.592920 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-629rn\" (UniqueName: \"kubernetes.io/projected/3ca0b945-b7b0-4a88-802c-cb698967faf4-kube-api-access-629rn\") pod \"certified-operators-9nbmf\" (UID: \"3ca0b945-b7b0-4a88-802c-cb698967faf4\") " pod="openshift-marketplace/certified-operators-9nbmf"
Feb 02 22:59:39 crc kubenswrapper[4755]: I0202 22:59:39.592967 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ca0b945-b7b0-4a88-802c-cb698967faf4-utilities\") pod \"certified-operators-9nbmf\" (UID: \"3ca0b945-b7b0-4a88-802c-cb698967faf4\") " pod="openshift-marketplace/certified-operators-9nbmf"
Feb 02 22:59:39 crc kubenswrapper[4755]: I0202 22:59:39.593571 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ca0b945-b7b0-4a88-802c-cb698967faf4-utilities\") pod \"certified-operators-9nbmf\" (UID: \"3ca0b945-b7b0-4a88-802c-cb698967faf4\") " pod="openshift-marketplace/certified-operators-9nbmf"
Feb 02 22:59:39 crc kubenswrapper[4755]: I0202 22:59:39.593602 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ca0b945-b7b0-4a88-802c-cb698967faf4-catalog-content\") pod \"certified-operators-9nbmf\" (UID: \"3ca0b945-b7b0-4a88-802c-cb698967faf4\") " pod="openshift-marketplace/certified-operators-9nbmf"
Feb 02 22:59:39 crc kubenswrapper[4755]: I0202 22:59:39.617682 4755 operation_generator.go:637]
"MountVolume.SetUp succeeded for volume \"kube-api-access-629rn\" (UniqueName: \"kubernetes.io/projected/3ca0b945-b7b0-4a88-802c-cb698967faf4-kube-api-access-629rn\") pod \"certified-operators-9nbmf\" (UID: \"3ca0b945-b7b0-4a88-802c-cb698967faf4\") " pod="openshift-marketplace/certified-operators-9nbmf" Feb 02 22:59:39 crc kubenswrapper[4755]: I0202 22:59:39.792347 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9nbmf" Feb 02 22:59:40 crc kubenswrapper[4755]: I0202 22:59:40.271390 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9nbmf"] Feb 02 22:59:40 crc kubenswrapper[4755]: I0202 22:59:40.747091 4755 generic.go:334] "Generic (PLEG): container finished" podID="3ca0b945-b7b0-4a88-802c-cb698967faf4" containerID="eb9fc68a8f3fa6327349b8fefc454bd499919a011f1859b8f9564ffe0cce7e77" exitCode=0 Feb 02 22:59:40 crc kubenswrapper[4755]: I0202 22:59:40.747526 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9nbmf" event={"ID":"3ca0b945-b7b0-4a88-802c-cb698967faf4","Type":"ContainerDied","Data":"eb9fc68a8f3fa6327349b8fefc454bd499919a011f1859b8f9564ffe0cce7e77"} Feb 02 22:59:40 crc kubenswrapper[4755]: I0202 22:59:40.747760 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9nbmf" event={"ID":"3ca0b945-b7b0-4a88-802c-cb698967faf4","Type":"ContainerStarted","Data":"73adc113ece195b04bb8477bdf3c9ba968385aa4818d3755a5382edf2bd9f99e"} Feb 02 22:59:42 crc kubenswrapper[4755]: I0202 22:59:42.772485 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9nbmf" event={"ID":"3ca0b945-b7b0-4a88-802c-cb698967faf4","Type":"ContainerStarted","Data":"1553778ae838d24689ea8ac0281171367d068a6da448cec9cab1bc0de87ad6c2"} Feb 02 22:59:43 crc kubenswrapper[4755]: I0202 22:59:43.784661 4755 generic.go:334] "Generic (PLEG): container finished" podID="3ca0b945-b7b0-4a88-802c-cb698967faf4" containerID="1553778ae838d24689ea8ac0281171367d068a6da448cec9cab1bc0de87ad6c2" exitCode=0 Feb 02 22:59:43 crc kubenswrapper[4755]: I0202 22:59:43.784841 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9nbmf" event={"ID":"3ca0b945-b7b0-4a88-802c-cb698967faf4","Type":"ContainerDied","Data":"1553778ae838d24689ea8ac0281171367d068a6da448cec9cab1bc0de87ad6c2"} Feb 02 22:59:44 crc kubenswrapper[4755]: I0202 22:59:44.797342 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9nbmf" event={"ID":"3ca0b945-b7b0-4a88-802c-cb698967faf4","Type":"ContainerStarted","Data":"8e3f59db74dbd2ef9707c4f4aab1186a6298fff229ecb2c47e8343817af04b30"} Feb 02 22:59:44 crc kubenswrapper[4755]: I0202 22:59:44.822159 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9nbmf" podStartSLOduration=2.389734747 podStartE2EDuration="5.822137149s" podCreationTimestamp="2026-02-02 22:59:39 +0000 UTC" firstStartedPulling="2026-02-02 22:59:40.750771553 +0000 UTC m=+1536.441991919" lastFinishedPulling="2026-02-02 22:59:44.183173935 +0000 UTC m=+1539.874394321" observedRunningTime="2026-02-02 22:59:44.814371872 +0000 UTC m=+1540.505592218" watchObservedRunningTime="2026-02-02 22:59:44.822137149 +0000 UTC m=+1540.513357475" Feb 02 22:59:47 crc kubenswrapper[4755]: I0202 22:59:47.023735 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-marketplace/community-operators-k8vgb" Feb 02 22:59:47 crc kubenswrapper[4755]: I0202 22:59:47.089236 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-k8vgb"] Feb 02 22:59:47 crc kubenswrapper[4755]: I0202 22:59:47.827142 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-k8vgb" podUID="1f3f63c1-55c8-41fc-b08d-0bea998c6bfd" containerName="registry-server" containerID="cri-o://ffa5167adad759eefbe381c6b454dc20e3ac58b01c3269f3fb5d387006b35284" gracePeriod=2 Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.452706 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k8vgb" Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.485037 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f3f63c1-55c8-41fc-b08d-0bea998c6bfd-utilities\") pod \"1f3f63c1-55c8-41fc-b08d-0bea998c6bfd\" (UID: \"1f3f63c1-55c8-41fc-b08d-0bea998c6bfd\") " Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.485275 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f3f63c1-55c8-41fc-b08d-0bea998c6bfd-catalog-content\") pod \"1f3f63c1-55c8-41fc-b08d-0bea998c6bfd\" (UID: \"1f3f63c1-55c8-41fc-b08d-0bea998c6bfd\") " Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.485456 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52f7t\" (UniqueName: \"kubernetes.io/projected/1f3f63c1-55c8-41fc-b08d-0bea998c6bfd-kube-api-access-52f7t\") pod \"1f3f63c1-55c8-41fc-b08d-0bea998c6bfd\" (UID: \"1f3f63c1-55c8-41fc-b08d-0bea998c6bfd\") " Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.487700 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f3f63c1-55c8-41fc-b08d-0bea998c6bfd-utilities" (OuterVolumeSpecName: "utilities") pod "1f3f63c1-55c8-41fc-b08d-0bea998c6bfd" (UID: "1f3f63c1-55c8-41fc-b08d-0bea998c6bfd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.503177 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f3f63c1-55c8-41fc-b08d-0bea998c6bfd-kube-api-access-52f7t" (OuterVolumeSpecName: "kube-api-access-52f7t") pod "1f3f63c1-55c8-41fc-b08d-0bea998c6bfd" (UID: "1f3f63c1-55c8-41fc-b08d-0bea998c6bfd"). InnerVolumeSpecName "kube-api-access-52f7t". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.550529 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f3f63c1-55c8-41fc-b08d-0bea998c6bfd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1f3f63c1-55c8-41fc-b08d-0bea998c6bfd" (UID: "1f3f63c1-55c8-41fc-b08d-0bea998c6bfd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.587628 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f3f63c1-55c8-41fc-b08d-0bea998c6bfd-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.587874 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52f7t\" (UniqueName: \"kubernetes.io/projected/1f3f63c1-55c8-41fc-b08d-0bea998c6bfd-kube-api-access-52f7t\") on node \"crc\" DevicePath \"\"" Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.587940 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f3f63c1-55c8-41fc-b08d-0bea998c6bfd-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.837411 4755 generic.go:334] "Generic (PLEG): container finished" podID="1f3f63c1-55c8-41fc-b08d-0bea998c6bfd" containerID="ffa5167adad759eefbe381c6b454dc20e3ac58b01c3269f3fb5d387006b35284" exitCode=0 Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.837449 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8vgb" event={"ID":"1f3f63c1-55c8-41fc-b08d-0bea998c6bfd","Type":"ContainerDied","Data":"ffa5167adad759eefbe381c6b454dc20e3ac58b01c3269f3fb5d387006b35284"} Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.837455 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k8vgb" Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.837479 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8vgb" event={"ID":"1f3f63c1-55c8-41fc-b08d-0bea998c6bfd","Type":"ContainerDied","Data":"56127b4e441512e6b0cdb3694139d947d9d7cdb717f451915de83f801c455576"} Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.837498 4755 scope.go:117] "RemoveContainer" containerID="ffa5167adad759eefbe381c6b454dc20e3ac58b01c3269f3fb5d387006b35284" Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.876987 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-k8vgb"] Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.878404 4755 scope.go:117] "RemoveContainer" containerID="8a2db8ce0dc765f18149ec258e2b0ae74ac4acc3328b281e167fd33ba1353ed6" Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.885901 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-k8vgb"] Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.900634 4755 scope.go:117] "RemoveContainer" containerID="e4a31442f61f194df9186884a7c08e6f4e2f9c91e9431e84c9894a81fdfc3b5f" Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.946138 4755 scope.go:117] "RemoveContainer" containerID="ffa5167adad759eefbe381c6b454dc20e3ac58b01c3269f3fb5d387006b35284" Feb 02 22:59:48 crc kubenswrapper[4755]: E0202 22:59:48.946565 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ffa5167adad759eefbe381c6b454dc20e3ac58b01c3269f3fb5d387006b35284\": container with ID starting with ffa5167adad759eefbe381c6b454dc20e3ac58b01c3269f3fb5d387006b35284 not found: ID does not exist" containerID="ffa5167adad759eefbe381c6b454dc20e3ac58b01c3269f3fb5d387006b35284" Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.946619 
4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffa5167adad759eefbe381c6b454dc20e3ac58b01c3269f3fb5d387006b35284"} err="failed to get container status \"ffa5167adad759eefbe381c6b454dc20e3ac58b01c3269f3fb5d387006b35284\": rpc error: code = NotFound desc = could not find container \"ffa5167adad759eefbe381c6b454dc20e3ac58b01c3269f3fb5d387006b35284\": container with ID starting with ffa5167adad759eefbe381c6b454dc20e3ac58b01c3269f3fb5d387006b35284 not found: ID does not exist" Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.946642 4755 scope.go:117] "RemoveContainer" containerID="8a2db8ce0dc765f18149ec258e2b0ae74ac4acc3328b281e167fd33ba1353ed6" Feb 02 22:59:48 crc kubenswrapper[4755]: E0202 22:59:48.946921 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a2db8ce0dc765f18149ec258e2b0ae74ac4acc3328b281e167fd33ba1353ed6\": container with ID starting with 8a2db8ce0dc765f18149ec258e2b0ae74ac4acc3328b281e167fd33ba1353ed6 not found: ID does not exist" containerID="8a2db8ce0dc765f18149ec258e2b0ae74ac4acc3328b281e167fd33ba1353ed6" Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.947238 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a2db8ce0dc765f18149ec258e2b0ae74ac4acc3328b281e167fd33ba1353ed6"} err="failed to get container status \"8a2db8ce0dc765f18149ec258e2b0ae74ac4acc3328b281e167fd33ba1353ed6\": rpc error: code = NotFound desc = could not find container \"8a2db8ce0dc765f18149ec258e2b0ae74ac4acc3328b281e167fd33ba1353ed6\": container with ID starting with 8a2db8ce0dc765f18149ec258e2b0ae74ac4acc3328b281e167fd33ba1353ed6 not found: ID does not exist" Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.947254 4755 scope.go:117] "RemoveContainer" containerID="e4a31442f61f194df9186884a7c08e6f4e2f9c91e9431e84c9894a81fdfc3b5f" Feb 02 22:59:48 crc kubenswrapper[4755]: E0202 22:59:48.947538 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4a31442f61f194df9186884a7c08e6f4e2f9c91e9431e84c9894a81fdfc3b5f\": container with ID starting with e4a31442f61f194df9186884a7c08e6f4e2f9c91e9431e84c9894a81fdfc3b5f not found: ID does not exist" containerID="e4a31442f61f194df9186884a7c08e6f4e2f9c91e9431e84c9894a81fdfc3b5f" Feb 02 22:59:48 crc kubenswrapper[4755]: I0202 22:59:48.947586 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4a31442f61f194df9186884a7c08e6f4e2f9c91e9431e84c9894a81fdfc3b5f"} err="failed to get container status \"e4a31442f61f194df9186884a7c08e6f4e2f9c91e9431e84c9894a81fdfc3b5f\": rpc error: code = NotFound desc = could not find container \"e4a31442f61f194df9186884a7c08e6f4e2f9c91e9431e84c9894a81fdfc3b5f\": container with ID starting with e4a31442f61f194df9186884a7c08e6f4e2f9c91e9431e84c9894a81fdfc3b5f not found: ID does not exist" Feb 02 22:59:49 crc kubenswrapper[4755]: I0202 22:59:49.079947 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f3f63c1-55c8-41fc-b08d-0bea998c6bfd" path="/var/lib/kubelet/pods/1f3f63c1-55c8-41fc-b08d-0bea998c6bfd/volumes" Feb 02 22:59:49 crc kubenswrapper[4755]: I0202 22:59:49.792802 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9nbmf" Feb 02 22:59:49 crc kubenswrapper[4755]: I0202 22:59:49.795982 4755 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9nbmf" Feb 02 22:59:49 crc kubenswrapper[4755]: I0202 22:59:49.853561 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9nbmf" Feb 02 22:59:50 crc kubenswrapper[4755]: I0202 22:59:50.954180 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9nbmf" Feb 02 22:59:51 crc kubenswrapper[4755]: I0202 22:59:51.214190 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9nbmf"] Feb 02 22:59:52 crc kubenswrapper[4755]: I0202 22:59:52.923900 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9nbmf" podUID="3ca0b945-b7b0-4a88-802c-cb698967faf4" containerName="registry-server" containerID="cri-o://8e3f59db74dbd2ef9707c4f4aab1186a6298fff229ecb2c47e8343817af04b30" gracePeriod=2 Feb 02 22:59:53 crc kubenswrapper[4755]: I0202 22:59:53.391307 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 22:59:53 crc kubenswrapper[4755]: I0202 22:59:53.391596 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 22:59:53 crc kubenswrapper[4755]: I0202 22:59:53.535122 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9nbmf" Feb 02 22:59:53 crc kubenswrapper[4755]: I0202 22:59:53.713045 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-629rn\" (UniqueName: \"kubernetes.io/projected/3ca0b945-b7b0-4a88-802c-cb698967faf4-kube-api-access-629rn\") pod \"3ca0b945-b7b0-4a88-802c-cb698967faf4\" (UID: \"3ca0b945-b7b0-4a88-802c-cb698967faf4\") " Feb 02 22:59:53 crc kubenswrapper[4755]: I0202 22:59:53.713218 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ca0b945-b7b0-4a88-802c-cb698967faf4-catalog-content\") pod \"3ca0b945-b7b0-4a88-802c-cb698967faf4\" (UID: \"3ca0b945-b7b0-4a88-802c-cb698967faf4\") " Feb 02 22:59:53 crc kubenswrapper[4755]: I0202 22:59:53.713313 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ca0b945-b7b0-4a88-802c-cb698967faf4-utilities\") pod \"3ca0b945-b7b0-4a88-802c-cb698967faf4\" (UID: \"3ca0b945-b7b0-4a88-802c-cb698967faf4\") " Feb 02 22:59:53 crc kubenswrapper[4755]: I0202 22:59:53.714273 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ca0b945-b7b0-4a88-802c-cb698967faf4-utilities" (OuterVolumeSpecName: "utilities") pod "3ca0b945-b7b0-4a88-802c-cb698967faf4" (UID: "3ca0b945-b7b0-4a88-802c-cb698967faf4"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:59:53 crc kubenswrapper[4755]: I0202 22:59:53.721560 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ca0b945-b7b0-4a88-802c-cb698967faf4-kube-api-access-629rn" (OuterVolumeSpecName: "kube-api-access-629rn") pod "3ca0b945-b7b0-4a88-802c-cb698967faf4" (UID: "3ca0b945-b7b0-4a88-802c-cb698967faf4"). InnerVolumeSpecName "kube-api-access-629rn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 22:59:53 crc kubenswrapper[4755]: I0202 22:59:53.769869 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ca0b945-b7b0-4a88-802c-cb698967faf4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3ca0b945-b7b0-4a88-802c-cb698967faf4" (UID: "3ca0b945-b7b0-4a88-802c-cb698967faf4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 22:59:53 crc kubenswrapper[4755]: I0202 22:59:53.816829 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ca0b945-b7b0-4a88-802c-cb698967faf4-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 22:59:53 crc kubenswrapper[4755]: I0202 22:59:53.816870 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ca0b945-b7b0-4a88-802c-cb698967faf4-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 22:59:53 crc kubenswrapper[4755]: I0202 22:59:53.816888 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-629rn\" (UniqueName: \"kubernetes.io/projected/3ca0b945-b7b0-4a88-802c-cb698967faf4-kube-api-access-629rn\") on node \"crc\" DevicePath \"\"" Feb 02 22:59:53 crc kubenswrapper[4755]: I0202 22:59:53.938463 4755 generic.go:334] "Generic (PLEG): container finished" podID="3ca0b945-b7b0-4a88-802c-cb698967faf4" containerID="8e3f59db74dbd2ef9707c4f4aab1186a6298fff229ecb2c47e8343817af04b30" exitCode=0 Feb 02 22:59:53 crc kubenswrapper[4755]: I0202 22:59:53.938524 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9nbmf" event={"ID":"3ca0b945-b7b0-4a88-802c-cb698967faf4","Type":"ContainerDied","Data":"8e3f59db74dbd2ef9707c4f4aab1186a6298fff229ecb2c47e8343817af04b30"} Feb 02 22:59:53 crc kubenswrapper[4755]: I0202 22:59:53.938575 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9nbmf" event={"ID":"3ca0b945-b7b0-4a88-802c-cb698967faf4","Type":"ContainerDied","Data":"73adc113ece195b04bb8477bdf3c9ba968385aa4818d3755a5382edf2bd9f99e"} Feb 02 22:59:53 crc kubenswrapper[4755]: I0202 22:59:53.938604 4755 scope.go:117] "RemoveContainer" containerID="8e3f59db74dbd2ef9707c4f4aab1186a6298fff229ecb2c47e8343817af04b30" Feb 02 22:59:53 crc kubenswrapper[4755]: I0202 22:59:53.938525 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9nbmf" Feb 02 22:59:53 crc kubenswrapper[4755]: I0202 22:59:53.963629 4755 scope.go:117] "RemoveContainer" containerID="1553778ae838d24689ea8ac0281171367d068a6da448cec9cab1bc0de87ad6c2" Feb 02 22:59:53 crc kubenswrapper[4755]: I0202 22:59:53.983677 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9nbmf"] Feb 02 22:59:53 crc kubenswrapper[4755]: I0202 22:59:53.996178 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9nbmf"] Feb 02 22:59:54 crc kubenswrapper[4755]: I0202 22:59:54.027468 4755 scope.go:117] "RemoveContainer" containerID="eb9fc68a8f3fa6327349b8fefc454bd499919a011f1859b8f9564ffe0cce7e77" Feb 02 22:59:54 crc kubenswrapper[4755]: I0202 22:59:54.072715 4755 scope.go:117] "RemoveContainer" containerID="8e3f59db74dbd2ef9707c4f4aab1186a6298fff229ecb2c47e8343817af04b30" Feb 02 22:59:54 crc kubenswrapper[4755]: E0202 22:59:54.073311 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e3f59db74dbd2ef9707c4f4aab1186a6298fff229ecb2c47e8343817af04b30\": container with ID starting with 8e3f59db74dbd2ef9707c4f4aab1186a6298fff229ecb2c47e8343817af04b30 not found: ID does not exist" containerID="8e3f59db74dbd2ef9707c4f4aab1186a6298fff229ecb2c47e8343817af04b30" Feb 02 22:59:54 crc kubenswrapper[4755]: I0202 22:59:54.073347 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e3f59db74dbd2ef9707c4f4aab1186a6298fff229ecb2c47e8343817af04b30"} err="failed to get container status \"8e3f59db74dbd2ef9707c4f4aab1186a6298fff229ecb2c47e8343817af04b30\": rpc error: code = NotFound desc = could not find container \"8e3f59db74dbd2ef9707c4f4aab1186a6298fff229ecb2c47e8343817af04b30\": container with ID starting with 8e3f59db74dbd2ef9707c4f4aab1186a6298fff229ecb2c47e8343817af04b30 not found: ID does not exist" Feb 02 22:59:54 crc kubenswrapper[4755]: I0202 22:59:54.073371 4755 scope.go:117] "RemoveContainer" containerID="1553778ae838d24689ea8ac0281171367d068a6da448cec9cab1bc0de87ad6c2" Feb 02 22:59:54 crc kubenswrapper[4755]: E0202 22:59:54.073929 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1553778ae838d24689ea8ac0281171367d068a6da448cec9cab1bc0de87ad6c2\": container with ID starting with 1553778ae838d24689ea8ac0281171367d068a6da448cec9cab1bc0de87ad6c2 not found: ID does not exist" containerID="1553778ae838d24689ea8ac0281171367d068a6da448cec9cab1bc0de87ad6c2" Feb 02 22:59:54 crc kubenswrapper[4755]: I0202 22:59:54.073969 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1553778ae838d24689ea8ac0281171367d068a6da448cec9cab1bc0de87ad6c2"} err="failed to get container status \"1553778ae838d24689ea8ac0281171367d068a6da448cec9cab1bc0de87ad6c2\": rpc error: code = NotFound desc = could not find container \"1553778ae838d24689ea8ac0281171367d068a6da448cec9cab1bc0de87ad6c2\": container with ID starting with 1553778ae838d24689ea8ac0281171367d068a6da448cec9cab1bc0de87ad6c2 not found: ID does not exist" Feb 02 22:59:54 crc kubenswrapper[4755]: I0202 22:59:54.073998 4755 scope.go:117] "RemoveContainer" containerID="eb9fc68a8f3fa6327349b8fefc454bd499919a011f1859b8f9564ffe0cce7e77" Feb 02 22:59:54 crc kubenswrapper[4755]: E0202 22:59:54.074571 4755 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"eb9fc68a8f3fa6327349b8fefc454bd499919a011f1859b8f9564ffe0cce7e77\": container with ID starting with eb9fc68a8f3fa6327349b8fefc454bd499919a011f1859b8f9564ffe0cce7e77 not found: ID does not exist" containerID="eb9fc68a8f3fa6327349b8fefc454bd499919a011f1859b8f9564ffe0cce7e77" Feb 02 22:59:54 crc kubenswrapper[4755]: I0202 22:59:54.074611 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb9fc68a8f3fa6327349b8fefc454bd499919a011f1859b8f9564ffe0cce7e77"} err="failed to get container status \"eb9fc68a8f3fa6327349b8fefc454bd499919a011f1859b8f9564ffe0cce7e77\": rpc error: code = NotFound desc = could not find container \"eb9fc68a8f3fa6327349b8fefc454bd499919a011f1859b8f9564ffe0cce7e77\": container with ID starting with eb9fc68a8f3fa6327349b8fefc454bd499919a011f1859b8f9564ffe0cce7e77 not found: ID does not exist" Feb 02 22:59:54 crc kubenswrapper[4755]: E0202 22:59:54.202740 4755 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3ca0b945_b7b0_4a88_802c_cb698967faf4.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3ca0b945_b7b0_4a88_802c_cb698967faf4.slice/crio-73adc113ece195b04bb8477bdf3c9ba968385aa4818d3755a5382edf2bd9f99e\": RecentStats: unable to find data in memory cache]" Feb 02 22:59:55 crc kubenswrapper[4755]: I0202 22:59:55.091406 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ca0b945-b7b0-4a88-802c-cb698967faf4" path="/var/lib/kubelet/pods/3ca0b945-b7b0-4a88-802c-cb698967faf4/volumes" Feb 02 23:00:00 crc kubenswrapper[4755]: I0202 23:00:00.192791 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501220-q5d65"] Feb 02 23:00:00 crc kubenswrapper[4755]: E0202 23:00:00.194910 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f3f63c1-55c8-41fc-b08d-0bea998c6bfd" containerName="registry-server" Feb 02 23:00:00 crc kubenswrapper[4755]: I0202 23:00:00.194987 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f3f63c1-55c8-41fc-b08d-0bea998c6bfd" containerName="registry-server" Feb 02 23:00:00 crc kubenswrapper[4755]: E0202 23:00:00.195097 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f3f63c1-55c8-41fc-b08d-0bea998c6bfd" containerName="extract-content" Feb 02 23:00:00 crc kubenswrapper[4755]: I0202 23:00:00.195159 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f3f63c1-55c8-41fc-b08d-0bea998c6bfd" containerName="extract-content" Feb 02 23:00:00 crc kubenswrapper[4755]: E0202 23:00:00.195223 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f3f63c1-55c8-41fc-b08d-0bea998c6bfd" containerName="extract-utilities" Feb 02 23:00:00 crc kubenswrapper[4755]: I0202 23:00:00.195298 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f3f63c1-55c8-41fc-b08d-0bea998c6bfd" containerName="extract-utilities" Feb 02 23:00:00 crc kubenswrapper[4755]: E0202 23:00:00.195376 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ca0b945-b7b0-4a88-802c-cb698967faf4" containerName="registry-server" Feb 02 23:00:00 crc kubenswrapper[4755]: I0202 23:00:00.195433 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ca0b945-b7b0-4a88-802c-cb698967faf4" containerName="registry-server" Feb 02 
23:00:00 crc kubenswrapper[4755]: E0202 23:00:00.195486 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ca0b945-b7b0-4a88-802c-cb698967faf4" containerName="extract-content" Feb 02 23:00:00 crc kubenswrapper[4755]: I0202 23:00:00.195533 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ca0b945-b7b0-4a88-802c-cb698967faf4" containerName="extract-content" Feb 02 23:00:00 crc kubenswrapper[4755]: E0202 23:00:00.195618 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ca0b945-b7b0-4a88-802c-cb698967faf4" containerName="extract-utilities" Feb 02 23:00:00 crc kubenswrapper[4755]: I0202 23:00:00.195672 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ca0b945-b7b0-4a88-802c-cb698967faf4" containerName="extract-utilities" Feb 02 23:00:00 crc kubenswrapper[4755]: I0202 23:00:00.195945 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ca0b945-b7b0-4a88-802c-cb698967faf4" containerName="registry-server" Feb 02 23:00:00 crc kubenswrapper[4755]: I0202 23:00:00.196011 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f3f63c1-55c8-41fc-b08d-0bea998c6bfd" containerName="registry-server" Feb 02 23:00:00 crc kubenswrapper[4755]: I0202 23:00:00.196775 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501220-q5d65" Feb 02 23:00:00 crc kubenswrapper[4755]: I0202 23:00:00.199789 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 02 23:00:00 crc kubenswrapper[4755]: I0202 23:00:00.200163 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 02 23:00:00 crc kubenswrapper[4755]: I0202 23:00:00.207289 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501220-q5d65"] Feb 02 23:00:00 crc kubenswrapper[4755]: I0202 23:00:00.369180 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7rdk\" (UniqueName: \"kubernetes.io/projected/f5fb1da2-2ea6-4980-8cee-256560a89653-kube-api-access-m7rdk\") pod \"collect-profiles-29501220-q5d65\" (UID: \"f5fb1da2-2ea6-4980-8cee-256560a89653\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501220-q5d65" Feb 02 23:00:00 crc kubenswrapper[4755]: I0202 23:00:00.369232 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f5fb1da2-2ea6-4980-8cee-256560a89653-secret-volume\") pod \"collect-profiles-29501220-q5d65\" (UID: \"f5fb1da2-2ea6-4980-8cee-256560a89653\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501220-q5d65" Feb 02 23:00:00 crc kubenswrapper[4755]: I0202 23:00:00.369259 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f5fb1da2-2ea6-4980-8cee-256560a89653-config-volume\") pod \"collect-profiles-29501220-q5d65\" (UID: \"f5fb1da2-2ea6-4980-8cee-256560a89653\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501220-q5d65" Feb 02 23:00:00 crc kubenswrapper[4755]: I0202 23:00:00.470981 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7rdk\" (UniqueName: 
\"kubernetes.io/projected/f5fb1da2-2ea6-4980-8cee-256560a89653-kube-api-access-m7rdk\") pod \"collect-profiles-29501220-q5d65\" (UID: \"f5fb1da2-2ea6-4980-8cee-256560a89653\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501220-q5d65" Feb 02 23:00:00 crc kubenswrapper[4755]: I0202 23:00:00.471038 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f5fb1da2-2ea6-4980-8cee-256560a89653-secret-volume\") pod \"collect-profiles-29501220-q5d65\" (UID: \"f5fb1da2-2ea6-4980-8cee-256560a89653\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501220-q5d65" Feb 02 23:00:00 crc kubenswrapper[4755]: I0202 23:00:00.471073 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f5fb1da2-2ea6-4980-8cee-256560a89653-config-volume\") pod \"collect-profiles-29501220-q5d65\" (UID: \"f5fb1da2-2ea6-4980-8cee-256560a89653\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501220-q5d65" Feb 02 23:00:00 crc kubenswrapper[4755]: I0202 23:00:00.472150 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f5fb1da2-2ea6-4980-8cee-256560a89653-config-volume\") pod \"collect-profiles-29501220-q5d65\" (UID: \"f5fb1da2-2ea6-4980-8cee-256560a89653\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501220-q5d65" Feb 02 23:00:00 crc kubenswrapper[4755]: I0202 23:00:00.476494 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f5fb1da2-2ea6-4980-8cee-256560a89653-secret-volume\") pod \"collect-profiles-29501220-q5d65\" (UID: \"f5fb1da2-2ea6-4980-8cee-256560a89653\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501220-q5d65" Feb 02 23:00:00 crc kubenswrapper[4755]: I0202 23:00:00.487860 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7rdk\" (UniqueName: \"kubernetes.io/projected/f5fb1da2-2ea6-4980-8cee-256560a89653-kube-api-access-m7rdk\") pod \"collect-profiles-29501220-q5d65\" (UID: \"f5fb1da2-2ea6-4980-8cee-256560a89653\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501220-q5d65" Feb 02 23:00:00 crc kubenswrapper[4755]: I0202 23:00:00.519109 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501220-q5d65" Feb 02 23:00:01 crc kubenswrapper[4755]: I0202 23:00:01.055166 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501220-q5d65"] Feb 02 23:00:02 crc kubenswrapper[4755]: I0202 23:00:02.037123 4755 generic.go:334] "Generic (PLEG): container finished" podID="f5fb1da2-2ea6-4980-8cee-256560a89653" containerID="6cae06d829b021f1d447df0f604a58a7eb46e001eee4048c09e69e2383db6e81" exitCode=0 Feb 02 23:00:02 crc kubenswrapper[4755]: I0202 23:00:02.037192 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501220-q5d65" event={"ID":"f5fb1da2-2ea6-4980-8cee-256560a89653","Type":"ContainerDied","Data":"6cae06d829b021f1d447df0f604a58a7eb46e001eee4048c09e69e2383db6e81"} Feb 02 23:00:02 crc kubenswrapper[4755]: I0202 23:00:02.037411 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501220-q5d65" event={"ID":"f5fb1da2-2ea6-4980-8cee-256560a89653","Type":"ContainerStarted","Data":"e2bee95701057015ff72d55ac5137ccf4eff6ed66c2a4322d42d93e0186c6a7d"} Feb 02 23:00:03 crc kubenswrapper[4755]: I0202 23:00:03.568071 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501220-q5d65" Feb 02 23:00:03 crc kubenswrapper[4755]: I0202 23:00:03.640465 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m7rdk\" (UniqueName: \"kubernetes.io/projected/f5fb1da2-2ea6-4980-8cee-256560a89653-kube-api-access-m7rdk\") pod \"f5fb1da2-2ea6-4980-8cee-256560a89653\" (UID: \"f5fb1da2-2ea6-4980-8cee-256560a89653\") " Feb 02 23:00:03 crc kubenswrapper[4755]: I0202 23:00:03.640692 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f5fb1da2-2ea6-4980-8cee-256560a89653-secret-volume\") pod \"f5fb1da2-2ea6-4980-8cee-256560a89653\" (UID: \"f5fb1da2-2ea6-4980-8cee-256560a89653\") " Feb 02 23:00:03 crc kubenswrapper[4755]: I0202 23:00:03.640718 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f5fb1da2-2ea6-4980-8cee-256560a89653-config-volume\") pod \"f5fb1da2-2ea6-4980-8cee-256560a89653\" (UID: \"f5fb1da2-2ea6-4980-8cee-256560a89653\") " Feb 02 23:00:03 crc kubenswrapper[4755]: I0202 23:00:03.641755 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5fb1da2-2ea6-4980-8cee-256560a89653-config-volume" (OuterVolumeSpecName: "config-volume") pod "f5fb1da2-2ea6-4980-8cee-256560a89653" (UID: "f5fb1da2-2ea6-4980-8cee-256560a89653"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 23:00:03 crc kubenswrapper[4755]: I0202 23:00:03.650946 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5fb1da2-2ea6-4980-8cee-256560a89653-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f5fb1da2-2ea6-4980-8cee-256560a89653" (UID: "f5fb1da2-2ea6-4980-8cee-256560a89653"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:00:03 crc kubenswrapper[4755]: I0202 23:00:03.653185 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5fb1da2-2ea6-4980-8cee-256560a89653-kube-api-access-m7rdk" (OuterVolumeSpecName: "kube-api-access-m7rdk") pod "f5fb1da2-2ea6-4980-8cee-256560a89653" (UID: "f5fb1da2-2ea6-4980-8cee-256560a89653"). InnerVolumeSpecName "kube-api-access-m7rdk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:00:03 crc kubenswrapper[4755]: I0202 23:00:03.743390 4755 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f5fb1da2-2ea6-4980-8cee-256560a89653-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 02 23:00:03 crc kubenswrapper[4755]: I0202 23:00:03.743419 4755 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f5fb1da2-2ea6-4980-8cee-256560a89653-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 23:00:03 crc kubenswrapper[4755]: I0202 23:00:03.743431 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m7rdk\" (UniqueName: \"kubernetes.io/projected/f5fb1da2-2ea6-4980-8cee-256560a89653-kube-api-access-m7rdk\") on node \"crc\" DevicePath \"\"" Feb 02 23:00:04 crc kubenswrapper[4755]: I0202 23:00:04.066618 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501220-q5d65" event={"ID":"f5fb1da2-2ea6-4980-8cee-256560a89653","Type":"ContainerDied","Data":"e2bee95701057015ff72d55ac5137ccf4eff6ed66c2a4322d42d93e0186c6a7d"} Feb 02 23:00:04 crc kubenswrapper[4755]: I0202 23:00:04.066681 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e2bee95701057015ff72d55ac5137ccf4eff6ed66c2a4322d42d93e0186c6a7d" Feb 02 23:00:04 crc kubenswrapper[4755]: I0202 23:00:04.067322 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501220-q5d65" Feb 02 23:00:08 crc kubenswrapper[4755]: I0202 23:00:08.505776 4755 scope.go:117] "RemoveContainer" containerID="5a3c83c20a93b7f8d77a81949e4e79b7a9af74806ed404a68379953d349e6b8b" Feb 02 23:00:08 crc kubenswrapper[4755]: I0202 23:00:08.613085 4755 scope.go:117] "RemoveContainer" containerID="ad7b31f28ff90ca5b9e23cc67681ed4153a56ca002a9ead25fc3c089b7ff456f" Feb 02 23:00:08 crc kubenswrapper[4755]: I0202 23:00:08.652336 4755 scope.go:117] "RemoveContainer" containerID="7c20449becb37e1509e2bcbb6395799eb5bd56dab8a511364e8d95d89937ac9e" Feb 02 23:00:23 crc kubenswrapper[4755]: I0202 23:00:23.389332 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 23:00:23 crc kubenswrapper[4755]: I0202 23:00:23.390017 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 23:00:23 crc kubenswrapper[4755]: I0202 23:00:23.390096 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" Feb 02 23:00:23 crc kubenswrapper[4755]: I0202 23:00:23.391437 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63"} pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 23:00:23 crc kubenswrapper[4755]: I0202 23:00:23.391541 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" containerID="cri-o://335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" gracePeriod=600 Feb 02 23:00:23 crc kubenswrapper[4755]: E0202 23:00:23.519286 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:00:24 crc kubenswrapper[4755]: I0202 23:00:24.333938 4755 generic.go:334] "Generic (PLEG): container finished" podID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" exitCode=0 Feb 02 23:00:24 crc kubenswrapper[4755]: I0202 23:00:24.334045 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerDied","Data":"335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63"} Feb 02 23:00:24 crc kubenswrapper[4755]: I0202 
23:00:24.334170 4755 scope.go:117] "RemoveContainer" containerID="e1a0edb6bc3318168553c3186dbd5ca8239787806078b7f1d8e7cf50cd938918" Feb 02 23:00:24 crc kubenswrapper[4755]: I0202 23:00:24.335109 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" Feb 02 23:00:24 crc kubenswrapper[4755]: E0202 23:00:24.335796 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:00:37 crc kubenswrapper[4755]: I0202 23:00:37.069030 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" Feb 02 23:00:37 crc kubenswrapper[4755]: E0202 23:00:37.069755 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:00:49 crc kubenswrapper[4755]: I0202 23:00:49.069984 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" Feb 02 23:00:49 crc kubenswrapper[4755]: E0202 23:00:49.071128 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:01:00 crc kubenswrapper[4755]: I0202 23:01:00.154320 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29501221-9nvdx"] Feb 02 23:01:00 crc kubenswrapper[4755]: E0202 23:01:00.155430 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5fb1da2-2ea6-4980-8cee-256560a89653" containerName="collect-profiles" Feb 02 23:01:00 crc kubenswrapper[4755]: I0202 23:01:00.155449 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5fb1da2-2ea6-4980-8cee-256560a89653" containerName="collect-profiles" Feb 02 23:01:00 crc kubenswrapper[4755]: I0202 23:01:00.155677 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5fb1da2-2ea6-4980-8cee-256560a89653" containerName="collect-profiles" Feb 02 23:01:00 crc kubenswrapper[4755]: I0202 23:01:00.156649 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29501221-9nvdx" Feb 02 23:01:00 crc kubenswrapper[4755]: I0202 23:01:00.167502 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29501221-9nvdx"] Feb 02 23:01:00 crc kubenswrapper[4755]: I0202 23:01:00.203289 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqgf4\" (UniqueName: \"kubernetes.io/projected/aae14811-9ad0-4c73-b154-10736e7504c6-kube-api-access-sqgf4\") pod \"keystone-cron-29501221-9nvdx\" (UID: \"aae14811-9ad0-4c73-b154-10736e7504c6\") " pod="openstack/keystone-cron-29501221-9nvdx" Feb 02 23:01:00 crc kubenswrapper[4755]: I0202 23:01:00.203511 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aae14811-9ad0-4c73-b154-10736e7504c6-config-data\") pod \"keystone-cron-29501221-9nvdx\" (UID: \"aae14811-9ad0-4c73-b154-10736e7504c6\") " pod="openstack/keystone-cron-29501221-9nvdx" Feb 02 23:01:00 crc kubenswrapper[4755]: I0202 23:01:00.203832 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aae14811-9ad0-4c73-b154-10736e7504c6-fernet-keys\") pod \"keystone-cron-29501221-9nvdx\" (UID: \"aae14811-9ad0-4c73-b154-10736e7504c6\") " pod="openstack/keystone-cron-29501221-9nvdx" Feb 02 23:01:00 crc kubenswrapper[4755]: I0202 23:01:00.203861 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aae14811-9ad0-4c73-b154-10736e7504c6-combined-ca-bundle\") pod \"keystone-cron-29501221-9nvdx\" (UID: \"aae14811-9ad0-4c73-b154-10736e7504c6\") " pod="openstack/keystone-cron-29501221-9nvdx" Feb 02 23:01:00 crc kubenswrapper[4755]: I0202 23:01:00.305607 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqgf4\" (UniqueName: \"kubernetes.io/projected/aae14811-9ad0-4c73-b154-10736e7504c6-kube-api-access-sqgf4\") pod \"keystone-cron-29501221-9nvdx\" (UID: \"aae14811-9ad0-4c73-b154-10736e7504c6\") " pod="openstack/keystone-cron-29501221-9nvdx" Feb 02 23:01:00 crc kubenswrapper[4755]: I0202 23:01:00.305719 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aae14811-9ad0-4c73-b154-10736e7504c6-config-data\") pod \"keystone-cron-29501221-9nvdx\" (UID: \"aae14811-9ad0-4c73-b154-10736e7504c6\") " pod="openstack/keystone-cron-29501221-9nvdx" Feb 02 23:01:00 crc kubenswrapper[4755]: I0202 23:01:00.305863 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aae14811-9ad0-4c73-b154-10736e7504c6-fernet-keys\") pod \"keystone-cron-29501221-9nvdx\" (UID: \"aae14811-9ad0-4c73-b154-10736e7504c6\") " pod="openstack/keystone-cron-29501221-9nvdx" Feb 02 23:01:00 crc kubenswrapper[4755]: I0202 23:01:00.305885 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aae14811-9ad0-4c73-b154-10736e7504c6-combined-ca-bundle\") pod \"keystone-cron-29501221-9nvdx\" (UID: \"aae14811-9ad0-4c73-b154-10736e7504c6\") " pod="openstack/keystone-cron-29501221-9nvdx" Feb 02 23:01:00 crc kubenswrapper[4755]: I0202 23:01:00.312930 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aae14811-9ad0-4c73-b154-10736e7504c6-fernet-keys\") pod \"keystone-cron-29501221-9nvdx\" (UID: \"aae14811-9ad0-4c73-b154-10736e7504c6\") " pod="openstack/keystone-cron-29501221-9nvdx" Feb 02 23:01:00 crc kubenswrapper[4755]: I0202 23:01:00.312977 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aae14811-9ad0-4c73-b154-10736e7504c6-combined-ca-bundle\") pod \"keystone-cron-29501221-9nvdx\" (UID: \"aae14811-9ad0-4c73-b154-10736e7504c6\") " pod="openstack/keystone-cron-29501221-9nvdx" Feb 02 23:01:00 crc kubenswrapper[4755]: I0202 23:01:00.314131 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aae14811-9ad0-4c73-b154-10736e7504c6-config-data\") pod \"keystone-cron-29501221-9nvdx\" (UID: \"aae14811-9ad0-4c73-b154-10736e7504c6\") " pod="openstack/keystone-cron-29501221-9nvdx" Feb 02 23:01:00 crc kubenswrapper[4755]: I0202 23:01:00.321395 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqgf4\" (UniqueName: \"kubernetes.io/projected/aae14811-9ad0-4c73-b154-10736e7504c6-kube-api-access-sqgf4\") pod \"keystone-cron-29501221-9nvdx\" (UID: \"aae14811-9ad0-4c73-b154-10736e7504c6\") " pod="openstack/keystone-cron-29501221-9nvdx" Feb 02 23:01:00 crc kubenswrapper[4755]: I0202 23:01:00.480945 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29501221-9nvdx" Feb 02 23:01:01 crc kubenswrapper[4755]: I0202 23:01:01.021833 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29501221-9nvdx"] Feb 02 23:01:01 crc kubenswrapper[4755]: I0202 23:01:01.772509 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29501221-9nvdx" event={"ID":"aae14811-9ad0-4c73-b154-10736e7504c6","Type":"ContainerStarted","Data":"7373444aa44575b934986afb86905b7571a2f9868c048247291ec193298b74ab"} Feb 02 23:01:01 crc kubenswrapper[4755]: I0202 23:01:01.772880 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29501221-9nvdx" event={"ID":"aae14811-9ad0-4c73-b154-10736e7504c6","Type":"ContainerStarted","Data":"3d0c0151d895b81956d8879ffeb340d93b8c7abecfcd531f47f11a53b9f2aeba"} Feb 02 23:01:01 crc kubenswrapper[4755]: I0202 23:01:01.800928 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29501221-9nvdx" podStartSLOduration=1.800910793 podStartE2EDuration="1.800910793s" podCreationTimestamp="2026-02-02 23:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 23:01:01.794390181 +0000 UTC m=+1617.485610517" watchObservedRunningTime="2026-02-02 23:01:01.800910793 +0000 UTC m=+1617.492131119" Feb 02 23:01:04 crc kubenswrapper[4755]: I0202 23:01:04.069432 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" Feb 02 23:01:04 crc kubenswrapper[4755]: E0202 23:01:04.071491 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:01:04 crc kubenswrapper[4755]: I0202 23:01:04.809639 4755 generic.go:334] "Generic (PLEG): container finished" podID="aae14811-9ad0-4c73-b154-10736e7504c6" containerID="7373444aa44575b934986afb86905b7571a2f9868c048247291ec193298b74ab" exitCode=0 Feb 02 23:01:04 crc kubenswrapper[4755]: I0202 23:01:04.809714 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29501221-9nvdx" event={"ID":"aae14811-9ad0-4c73-b154-10736e7504c6","Type":"ContainerDied","Data":"7373444aa44575b934986afb86905b7571a2f9868c048247291ec193298b74ab"} Feb 02 23:01:06 crc kubenswrapper[4755]: I0202 23:01:06.259022 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29501221-9nvdx" Feb 02 23:01:06 crc kubenswrapper[4755]: I0202 23:01:06.344003 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aae14811-9ad0-4c73-b154-10736e7504c6-config-data\") pod \"aae14811-9ad0-4c73-b154-10736e7504c6\" (UID: \"aae14811-9ad0-4c73-b154-10736e7504c6\") " Feb 02 23:01:06 crc kubenswrapper[4755]: I0202 23:01:06.344287 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aae14811-9ad0-4c73-b154-10736e7504c6-combined-ca-bundle\") pod \"aae14811-9ad0-4c73-b154-10736e7504c6\" (UID: \"aae14811-9ad0-4c73-b154-10736e7504c6\") " Feb 02 23:01:06 crc kubenswrapper[4755]: I0202 23:01:06.344351 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aae14811-9ad0-4c73-b154-10736e7504c6-fernet-keys\") pod \"aae14811-9ad0-4c73-b154-10736e7504c6\" (UID: \"aae14811-9ad0-4c73-b154-10736e7504c6\") " Feb 02 23:01:06 crc kubenswrapper[4755]: I0202 23:01:06.344374 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sqgf4\" (UniqueName: \"kubernetes.io/projected/aae14811-9ad0-4c73-b154-10736e7504c6-kube-api-access-sqgf4\") pod \"aae14811-9ad0-4c73-b154-10736e7504c6\" (UID: \"aae14811-9ad0-4c73-b154-10736e7504c6\") " Feb 02 23:01:06 crc kubenswrapper[4755]: I0202 23:01:06.349818 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aae14811-9ad0-4c73-b154-10736e7504c6-kube-api-access-sqgf4" (OuterVolumeSpecName: "kube-api-access-sqgf4") pod "aae14811-9ad0-4c73-b154-10736e7504c6" (UID: "aae14811-9ad0-4c73-b154-10736e7504c6"). InnerVolumeSpecName "kube-api-access-sqgf4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:01:06 crc kubenswrapper[4755]: I0202 23:01:06.350034 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aae14811-9ad0-4c73-b154-10736e7504c6-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "aae14811-9ad0-4c73-b154-10736e7504c6" (UID: "aae14811-9ad0-4c73-b154-10736e7504c6"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:01:06 crc kubenswrapper[4755]: I0202 23:01:06.380873 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aae14811-9ad0-4c73-b154-10736e7504c6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aae14811-9ad0-4c73-b154-10736e7504c6" (UID: "aae14811-9ad0-4c73-b154-10736e7504c6"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:01:06 crc kubenswrapper[4755]: I0202 23:01:06.418263 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aae14811-9ad0-4c73-b154-10736e7504c6-config-data" (OuterVolumeSpecName: "config-data") pod "aae14811-9ad0-4c73-b154-10736e7504c6" (UID: "aae14811-9ad0-4c73-b154-10736e7504c6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:01:06 crc kubenswrapper[4755]: I0202 23:01:06.446845 4755 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aae14811-9ad0-4c73-b154-10736e7504c6-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 23:01:06 crc kubenswrapper[4755]: I0202 23:01:06.447102 4755 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aae14811-9ad0-4c73-b154-10736e7504c6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 23:01:06 crc kubenswrapper[4755]: I0202 23:01:06.447302 4755 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aae14811-9ad0-4c73-b154-10736e7504c6-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 02 23:01:06 crc kubenswrapper[4755]: I0202 23:01:06.447422 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sqgf4\" (UniqueName: \"kubernetes.io/projected/aae14811-9ad0-4c73-b154-10736e7504c6-kube-api-access-sqgf4\") on node \"crc\" DevicePath \"\"" Feb 02 23:01:06 crc kubenswrapper[4755]: I0202 23:01:06.834859 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29501221-9nvdx" event={"ID":"aae14811-9ad0-4c73-b154-10736e7504c6","Type":"ContainerDied","Data":"3d0c0151d895b81956d8879ffeb340d93b8c7abecfcd531f47f11a53b9f2aeba"} Feb 02 23:01:06 crc kubenswrapper[4755]: I0202 23:01:06.834915 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d0c0151d895b81956d8879ffeb340d93b8c7abecfcd531f47f11a53b9f2aeba" Feb 02 23:01:06 crc kubenswrapper[4755]: I0202 23:01:06.834919 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29501221-9nvdx" Feb 02 23:01:08 crc kubenswrapper[4755]: I0202 23:01:08.799567 4755 scope.go:117] "RemoveContainer" containerID="446d14daa03e8b6b461fd6b5532b4b64048f99b269174b061d4c46666f735e82" Feb 02 23:01:08 crc kubenswrapper[4755]: I0202 23:01:08.871122 4755 scope.go:117] "RemoveContainer" containerID="9e76d4b8d4fd58f8070c673771c4dab54e8d08ed55c687d8b000cd092756dcb6" Feb 02 23:01:08 crc kubenswrapper[4755]: I0202 23:01:08.930575 4755 scope.go:117] "RemoveContainer" containerID="02a7be8f4ff0bf2cfddfd57c5c623e159f0d5ce252bc6e79c5531ed99ff304b3" Feb 02 23:01:08 crc kubenswrapper[4755]: I0202 23:01:08.990422 4755 scope.go:117] "RemoveContainer" containerID="f0496aa247df17c0c4a844b37679945755b95ce5a19a813c19f9f84bfb3d4c29" Feb 02 23:01:15 crc kubenswrapper[4755]: I0202 23:01:15.082786 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" Feb 02 23:01:15 crc kubenswrapper[4755]: E0202 23:01:15.084152 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:01:30 crc kubenswrapper[4755]: I0202 23:01:30.069629 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" Feb 02 23:01:30 crc kubenswrapper[4755]: E0202 23:01:30.070700 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:01:44 crc kubenswrapper[4755]: I0202 23:01:44.068817 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" Feb 02 23:01:44 crc kubenswrapper[4755]: E0202 23:01:44.071014 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:01:56 crc kubenswrapper[4755]: I0202 23:01:56.069812 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" Feb 02 23:01:56 crc kubenswrapper[4755]: E0202 23:01:56.071721 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:01:57 crc kubenswrapper[4755]: I0202 23:01:57.563491 4755 
generic.go:334] "Generic (PLEG): container finished" podID="3501887a-bec4-43bd-b0ed-1daf65ae1331" containerID="30067130dadae37055fc7d33ae1aaf8d4505991c0179543b799febfa003bdfab" exitCode=0 Feb 02 23:01:57 crc kubenswrapper[4755]: I0202 23:01:57.563572 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj" event={"ID":"3501887a-bec4-43bd-b0ed-1daf65ae1331","Type":"ContainerDied","Data":"30067130dadae37055fc7d33ae1aaf8d4505991c0179543b799febfa003bdfab"} Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.149933 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.233166 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5cgls\" (UniqueName: \"kubernetes.io/projected/3501887a-bec4-43bd-b0ed-1daf65ae1331-kube-api-access-5cgls\") pod \"3501887a-bec4-43bd-b0ed-1daf65ae1331\" (UID: \"3501887a-bec4-43bd-b0ed-1daf65ae1331\") " Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.233316 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3501887a-bec4-43bd-b0ed-1daf65ae1331-bootstrap-combined-ca-bundle\") pod \"3501887a-bec4-43bd-b0ed-1daf65ae1331\" (UID: \"3501887a-bec4-43bd-b0ed-1daf65ae1331\") " Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.233465 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/3501887a-bec4-43bd-b0ed-1daf65ae1331-ssh-key-openstack-edpm-ipam\") pod \"3501887a-bec4-43bd-b0ed-1daf65ae1331\" (UID: \"3501887a-bec4-43bd-b0ed-1daf65ae1331\") " Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.233604 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3501887a-bec4-43bd-b0ed-1daf65ae1331-inventory\") pod \"3501887a-bec4-43bd-b0ed-1daf65ae1331\" (UID: \"3501887a-bec4-43bd-b0ed-1daf65ae1331\") " Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.239400 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3501887a-bec4-43bd-b0ed-1daf65ae1331-kube-api-access-5cgls" (OuterVolumeSpecName: "kube-api-access-5cgls") pod "3501887a-bec4-43bd-b0ed-1daf65ae1331" (UID: "3501887a-bec4-43bd-b0ed-1daf65ae1331"). InnerVolumeSpecName "kube-api-access-5cgls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.241386 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3501887a-bec4-43bd-b0ed-1daf65ae1331-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "3501887a-bec4-43bd-b0ed-1daf65ae1331" (UID: "3501887a-bec4-43bd-b0ed-1daf65ae1331"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.271165 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3501887a-bec4-43bd-b0ed-1daf65ae1331-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "3501887a-bec4-43bd-b0ed-1daf65ae1331" (UID: "3501887a-bec4-43bd-b0ed-1daf65ae1331"). 
InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.284531 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3501887a-bec4-43bd-b0ed-1daf65ae1331-inventory" (OuterVolumeSpecName: "inventory") pod "3501887a-bec4-43bd-b0ed-1daf65ae1331" (UID: "3501887a-bec4-43bd-b0ed-1daf65ae1331"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.336177 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/3501887a-bec4-43bd-b0ed-1daf65ae1331-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.336898 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3501887a-bec4-43bd-b0ed-1daf65ae1331-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.336933 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5cgls\" (UniqueName: \"kubernetes.io/projected/3501887a-bec4-43bd-b0ed-1daf65ae1331-kube-api-access-5cgls\") on node \"crc\" DevicePath \"\"" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.336964 4755 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3501887a-bec4-43bd-b0ed-1daf65ae1331-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.593448 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj" event={"ID":"3501887a-bec4-43bd-b0ed-1daf65ae1331","Type":"ContainerDied","Data":"8ca7c9a034b2ae9052d8a7fa75394cabc6f7b0ece352477544b2ca7f6ce968ab"} Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.593503 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ca7c9a034b2ae9052d8a7fa75394cabc6f7b0ece352477544b2ca7f6ce968ab" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.593507 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.706050 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h4969"] Feb 02 23:01:59 crc kubenswrapper[4755]: E0202 23:01:59.706519 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3501887a-bec4-43bd-b0ed-1daf65ae1331" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.706541 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3501887a-bec4-43bd-b0ed-1daf65ae1331" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Feb 02 23:01:59 crc kubenswrapper[4755]: E0202 23:01:59.706600 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aae14811-9ad0-4c73-b154-10736e7504c6" containerName="keystone-cron" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.706608 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="aae14811-9ad0-4c73-b154-10736e7504c6" containerName="keystone-cron" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.718202 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="aae14811-9ad0-4c73-b154-10736e7504c6" containerName="keystone-cron" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.718239 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="3501887a-bec4-43bd-b0ed-1daf65ae1331" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.719410 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h4969"] Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.719488 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h4969" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.722385 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.723164 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.724409 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.725072 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-n24hl" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.869015 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4krf\" (UniqueName: \"kubernetes.io/projected/1010f2d3-0d67-4d01-8e07-f95412d03443-kube-api-access-b4krf\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-h4969\" (UID: \"1010f2d3-0d67-4d01-8e07-f95412d03443\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h4969" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.869440 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1010f2d3-0d67-4d01-8e07-f95412d03443-ssh-key-openstack-edpm-ipam\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-h4969\" (UID: \"1010f2d3-0d67-4d01-8e07-f95412d03443\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h4969" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.869768 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1010f2d3-0d67-4d01-8e07-f95412d03443-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-h4969\" (UID: \"1010f2d3-0d67-4d01-8e07-f95412d03443\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h4969" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.971863 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4krf\" (UniqueName: \"kubernetes.io/projected/1010f2d3-0d67-4d01-8e07-f95412d03443-kube-api-access-b4krf\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-h4969\" (UID: \"1010f2d3-0d67-4d01-8e07-f95412d03443\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h4969" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.971938 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1010f2d3-0d67-4d01-8e07-f95412d03443-ssh-key-openstack-edpm-ipam\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-h4969\" (UID: \"1010f2d3-0d67-4d01-8e07-f95412d03443\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h4969" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.972040 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1010f2d3-0d67-4d01-8e07-f95412d03443-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-h4969\" (UID: \"1010f2d3-0d67-4d01-8e07-f95412d03443\") " 
pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h4969" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.975627 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1010f2d3-0d67-4d01-8e07-f95412d03443-ssh-key-openstack-edpm-ipam\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-h4969\" (UID: \"1010f2d3-0d67-4d01-8e07-f95412d03443\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h4969" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.979085 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1010f2d3-0d67-4d01-8e07-f95412d03443-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-h4969\" (UID: \"1010f2d3-0d67-4d01-8e07-f95412d03443\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h4969" Feb 02 23:01:59 crc kubenswrapper[4755]: I0202 23:01:59.992380 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4krf\" (UniqueName: \"kubernetes.io/projected/1010f2d3-0d67-4d01-8e07-f95412d03443-kube-api-access-b4krf\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-h4969\" (UID: \"1010f2d3-0d67-4d01-8e07-f95412d03443\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h4969" Feb 02 23:02:00 crc kubenswrapper[4755]: I0202 23:02:00.092176 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h4969" Feb 02 23:02:00 crc kubenswrapper[4755]: I0202 23:02:00.673612 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h4969"] Feb 02 23:02:00 crc kubenswrapper[4755]: W0202 23:02:00.678172 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1010f2d3_0d67_4d01_8e07_f95412d03443.slice/crio-606916193d9832533f93535417cc28f40f542655c94b27b271c247851ef1aded WatchSource:0}: Error finding container 606916193d9832533f93535417cc28f40f542655c94b27b271c247851ef1aded: Status 404 returned error can't find the container with id 606916193d9832533f93535417cc28f40f542655c94b27b271c247851ef1aded Feb 02 23:02:00 crc kubenswrapper[4755]: I0202 23:02:00.686799 4755 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 23:02:01 crc kubenswrapper[4755]: I0202 23:02:01.616663 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h4969" event={"ID":"1010f2d3-0d67-4d01-8e07-f95412d03443","Type":"ContainerStarted","Data":"d156eae196e66b3e0806ed77a08a0387bd20e321b52900edb51f7b868cc38a77"} Feb 02 23:02:01 crc kubenswrapper[4755]: I0202 23:02:01.617211 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h4969" event={"ID":"1010f2d3-0d67-4d01-8e07-f95412d03443","Type":"ContainerStarted","Data":"606916193d9832533f93535417cc28f40f542655c94b27b271c247851ef1aded"} Feb 02 23:02:01 crc kubenswrapper[4755]: I0202 23:02:01.651135 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h4969" podStartSLOduration=2.202751005 podStartE2EDuration="2.65111634s" podCreationTimestamp="2026-02-02 23:01:59 +0000 UTC" 
firstStartedPulling="2026-02-02 23:02:00.68651167 +0000 UTC m=+1676.377731996" lastFinishedPulling="2026-02-02 23:02:01.134876985 +0000 UTC m=+1676.826097331" observedRunningTime="2026-02-02 23:02:01.647313453 +0000 UTC m=+1677.338533809" watchObservedRunningTime="2026-02-02 23:02:01.65111634 +0000 UTC m=+1677.342336666" Feb 02 23:02:08 crc kubenswrapper[4755]: I0202 23:02:08.069132 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" Feb 02 23:02:08 crc kubenswrapper[4755]: E0202 23:02:08.070290 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:02:22 crc kubenswrapper[4755]: I0202 23:02:22.069076 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" Feb 02 23:02:22 crc kubenswrapper[4755]: E0202 23:02:22.069876 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:02:33 crc kubenswrapper[4755]: I0202 23:02:33.069212 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" Feb 02 23:02:33 crc kubenswrapper[4755]: E0202 23:02:33.070054 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:02:44 crc kubenswrapper[4755]: I0202 23:02:44.069358 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" Feb 02 23:02:44 crc kubenswrapper[4755]: E0202 23:02:44.070437 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:02:56 crc kubenswrapper[4755]: I0202 23:02:56.069006 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" Feb 02 23:02:56 crc kubenswrapper[4755]: E0202 23:02:56.070150 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Feb 02 23:02:58 crc kubenswrapper[4755]: I0202 23:02:58.070773 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-tpv4r"]
Feb 02 23:02:58 crc kubenswrapper[4755]: I0202 23:02:58.083857 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-8f17-account-create-update-z6n62"]
Feb 02 23:02:58 crc kubenswrapper[4755]: I0202 23:02:58.099903 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-ca50-account-create-update-nzx6m"]
Feb 02 23:02:58 crc kubenswrapper[4755]: I0202 23:02:58.112397 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-g7476"]
Feb 02 23:02:58 crc kubenswrapper[4755]: I0202 23:02:58.123673 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-fc7d-account-create-update-bl74s"]
Feb 02 23:02:58 crc kubenswrapper[4755]: I0202 23:02:58.139314 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-tpv4r"]
Feb 02 23:02:58 crc kubenswrapper[4755]: I0202 23:02:58.150255 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-nbwvc"]
Feb 02 23:02:58 crc kubenswrapper[4755]: I0202 23:02:58.164492 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-ca50-account-create-update-nzx6m"]
Feb 02 23:02:58 crc kubenswrapper[4755]: I0202 23:02:58.177279 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-g7476"]
Feb 02 23:02:58 crc kubenswrapper[4755]: I0202 23:02:58.189650 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-fc7d-account-create-update-bl74s"]
Feb 02 23:02:58 crc kubenswrapper[4755]: I0202 23:02:58.202439 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-nbwvc"]
Feb 02 23:02:58 crc kubenswrapper[4755]: I0202 23:02:58.214608 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-8f17-account-create-update-z6n62"]
Feb 02 23:02:59 crc kubenswrapper[4755]: I0202 23:02:59.090153 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05549d64-5e5c-4833-9180-b902c9563863" path="/var/lib/kubelet/pods/05549d64-5e5c-4833-9180-b902c9563863/volumes"
Feb 02 23:02:59 crc kubenswrapper[4755]: I0202 23:02:59.092072 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a1f428c-a71f-4cfa-8d34-178584bad9a5" path="/var/lib/kubelet/pods/7a1f428c-a71f-4cfa-8d34-178584bad9a5/volumes"
Feb 02 23:02:59 crc kubenswrapper[4755]: I0202 23:02:59.093404 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ccddf6f-bdaf-40aa-9c6f-56e695369eeb" path="/var/lib/kubelet/pods/9ccddf6f-bdaf-40aa-9c6f-56e695369eeb/volumes"
Feb 02 23:02:59 crc kubenswrapper[4755]: I0202 23:02:59.094637 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1d37e1b-c28c-4fca-823b-3eb4c6e0364b" path="/var/lib/kubelet/pods/a1d37e1b-c28c-4fca-823b-3eb4c6e0364b/volumes"
Feb 02 23:02:59 crc kubenswrapper[4755]: I0202 23:02:59.097075 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc8666d1-433d-4bbe-8079-1675a028f85f" path="/var/lib/kubelet/pods/dc8666d1-433d-4bbe-8079-1675a028f85f/volumes"
Feb 02 23:02:59 crc kubenswrapper[4755]: I0202 23:02:59.098305 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8c779f8-e41d-4df2-8b72-4812419f750f" path="/var/lib/kubelet/pods/f8c779f8-e41d-4df2-8b72-4812419f750f/volumes"
Feb 02 23:03:09 crc kubenswrapper[4755]: I0202 23:03:09.108537 4755 scope.go:117] "RemoveContainer" containerID="550835e02b06a1a6922975bd0a6914f8e27024c00d7d4135fc0c475194741957"
Feb 02 23:03:09 crc kubenswrapper[4755]: I0202 23:03:09.157638 4755 scope.go:117] "RemoveContainer" containerID="f3722ac2476d144fef57641d38b14bc0b5d3dce1d30fc099de58866f69d9de04"
Feb 02 23:03:09 crc kubenswrapper[4755]: I0202 23:03:09.223322 4755 scope.go:117] "RemoveContainer" containerID="46464d7a60b85fdd8bb5b3a1463a61e2229173cfc9b335e80ed44fba4bf0fe9a"
Feb 02 23:03:09 crc kubenswrapper[4755]: I0202 23:03:09.274385 4755 scope.go:117] "RemoveContainer" containerID="aa24f1c132f4b4bc8ab3d80d16774742b8a0e584c58c86c73b7c723474851cec"
Feb 02 23:03:09 crc kubenswrapper[4755]: I0202 23:03:09.348265 4755 scope.go:117] "RemoveContainer" containerID="8a749ceb46f50e0da84049c41f6b4dfbeb9cb0a3530cdd09681ded512311ebb3"
Feb 02 23:03:09 crc kubenswrapper[4755]: I0202 23:03:09.376388 4755 scope.go:117] "RemoveContainer" containerID="95eba6d72b8aad6257655a0b6dd07c1999278fc7c9cb73c238e2e9b95312bc4b"
Feb 02 23:03:11 crc kubenswrapper[4755]: I0202 23:03:11.068950 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63"
Feb 02 23:03:11 crc kubenswrapper[4755]: E0202 23:03:11.069742 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f"
Feb 02 23:03:22 crc kubenswrapper[4755]: I0202 23:03:22.058585 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-8flnx"]
Feb 02 23:03:22 crc kubenswrapper[4755]: I0202 23:03:22.071394 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-8flnx"]
Feb 02 23:03:22 crc kubenswrapper[4755]: I0202 23:03:22.084983 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-6a80-account-create-update-hngf6"]
Feb 02 23:03:22 crc kubenswrapper[4755]: I0202 23:03:22.094334 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-8750-account-create-update-hh7qm"]
Feb 02 23:03:22 crc kubenswrapper[4755]: I0202 23:03:22.104504 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-db-create-d4c24"]
Feb 02 23:03:22 crc kubenswrapper[4755]: I0202 23:03:22.118619 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-l5tks"]
Feb 02 23:03:22 crc kubenswrapper[4755]: I0202 23:03:22.128330 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-ftxm7"]
Feb 02 23:03:22 crc kubenswrapper[4755]: I0202 23:03:22.136515 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-8j88c"]
Feb 02 23:03:22 crc kubenswrapper[4755]: I0202 23:03:22.144514 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-ed0a-account-create-update-5rdjc"]
Feb 02 23:03:22 crc kubenswrapper[4755]: I0202 23:03:22.152485 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-8750-account-create-update-hh7qm"]
Feb 02 23:03:22 crc kubenswrapper[4755]: I0202 23:03:22.161445 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-6a80-account-create-update-hngf6"]
Feb 02 23:03:22 crc kubenswrapper[4755]: I0202 23:03:22.170304 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-l5tks"]
Feb 02 23:03:22 crc kubenswrapper[4755]: I0202 23:03:22.182541 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-db-create-d4c24"]
Feb 02 23:03:22 crc kubenswrapper[4755]: I0202 23:03:22.193438 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-ed0a-account-create-update-5rdjc"]
Feb 02 23:03:22 crc kubenswrapper[4755]: I0202 23:03:22.201409 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-2fbd-account-create-update-gv4s4"]
Feb 02 23:03:22 crc kubenswrapper[4755]: I0202 23:03:22.208921 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-ftxm7"]
Feb 02 23:03:22 crc kubenswrapper[4755]: I0202 23:03:22.216083 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-8j88c"]
Feb 02 23:03:22 crc kubenswrapper[4755]: I0202 23:03:22.224098 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-2fbd-account-create-update-gv4s4"]
Feb 02 23:03:23 crc kubenswrapper[4755]: I0202 23:03:23.094286 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0329c472-ea03-47c4-a6e7-ae1f89169e6b" path="/var/lib/kubelet/pods/0329c472-ea03-47c4-a6e7-ae1f89169e6b/volumes"
Feb 02 23:03:23 crc kubenswrapper[4755]: I0202 23:03:23.098000 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0591104a-5134-4402-aa0f-4b94a79aa5df" path="/var/lib/kubelet/pods/0591104a-5134-4402-aa0f-4b94a79aa5df/volumes"
Feb 02 23:03:23 crc kubenswrapper[4755]: I0202 23:03:23.099680 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="222237ea-cd02-4919-b29c-9770862a1d51" path="/var/lib/kubelet/pods/222237ea-cd02-4919-b29c-9770862a1d51/volumes"
Feb 02 23:03:23 crc kubenswrapper[4755]: I0202 23:03:23.101167 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ca39dc-75b9-48e7-a8c6-2b405d10a219" path="/var/lib/kubelet/pods/49ca39dc-75b9-48e7-a8c6-2b405d10a219/volumes"
Feb 02 23:03:23 crc kubenswrapper[4755]: I0202 23:03:23.103891 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71e11ad7-2fb2-4059-997a-107df77d50aa" path="/var/lib/kubelet/pods/71e11ad7-2fb2-4059-997a-107df77d50aa/volumes"
Feb 02 23:03:23 crc kubenswrapper[4755]: I0202 23:03:23.105438 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76950441-48ad-4562-bbf6-15fd01c6d2d4" path="/var/lib/kubelet/pods/76950441-48ad-4562-bbf6-15fd01c6d2d4/volumes"
Feb 02 23:03:23 crc kubenswrapper[4755]: I0202 23:03:23.106705 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e05b09d-5b2a-4b6a-8ff9-dbbe19018706" path="/var/lib/kubelet/pods/7e05b09d-5b2a-4b6a-8ff9-dbbe19018706/volumes"
Feb 02 23:03:23 crc kubenswrapper[4755]: I0202 23:03:23.108417 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd" path="/var/lib/kubelet/pods/82b3aa0f-e2ee-4c4c-81ac-48571c2cdbdd/volumes"
Feb 02 23:03:23 crc kubenswrapper[4755]: I0202 23:03:23.110039 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bbc7c682-a173-43e9-bca1-404c12b4b333" path="/var/lib/kubelet/pods/bbc7c682-a173-43e9-bca1-404c12b4b333/volumes"
Feb 02 23:03:26 crc kubenswrapper[4755]: I0202 23:03:26.069074 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63"
Feb 02 23:03:26 crc kubenswrapper[4755]: E0202 23:03:26.070080 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f"
Feb 02 23:03:28 crc kubenswrapper[4755]: I0202 23:03:28.047341 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-fdxn5"]
Feb 02 23:03:28 crc kubenswrapper[4755]: I0202 23:03:28.070646 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-x247p"]
Feb 02 23:03:28 crc kubenswrapper[4755]: I0202 23:03:28.086314 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-fdxn5"]
Feb 02 23:03:28 crc kubenswrapper[4755]: I0202 23:03:28.096244 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-x247p"]
Feb 02 23:03:29 crc kubenswrapper[4755]: I0202 23:03:29.131225 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02b6b0aa-4f58-4de4-83b7-c3291e005325" path="/var/lib/kubelet/pods/02b6b0aa-4f58-4de4-83b7-c3291e005325/volumes"
Feb 02 23:03:29 crc kubenswrapper[4755]: I0202 23:03:29.132199 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47165bb1-af4a-4e73-957e-8f0845a29841" path="/var/lib/kubelet/pods/47165bb1-af4a-4e73-957e-8f0845a29841/volumes"
Feb 02 23:03:36 crc kubenswrapper[4755]: I0202 23:03:36.809497 4755 generic.go:334] "Generic (PLEG): container finished" podID="1010f2d3-0d67-4d01-8e07-f95412d03443" containerID="d156eae196e66b3e0806ed77a08a0387bd20e321b52900edb51f7b868cc38a77" exitCode=0
Feb 02 23:03:36 crc kubenswrapper[4755]: I0202 23:03:36.809580 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h4969" event={"ID":"1010f2d3-0d67-4d01-8e07-f95412d03443","Type":"ContainerDied","Data":"d156eae196e66b3e0806ed77a08a0387bd20e321b52900edb51f7b868cc38a77"}
Feb 02 23:03:38 crc kubenswrapper[4755]: I0202 23:03:38.078000 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63"
Feb 02 23:03:38 crc kubenswrapper[4755]: E0202 23:03:38.078631 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f"
Feb 02 23:03:38 crc kubenswrapper[4755]: I0202 23:03:38.329955 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h4969"
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h4969" Feb 02 23:03:38 crc kubenswrapper[4755]: I0202 23:03:38.492316 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1010f2d3-0d67-4d01-8e07-f95412d03443-ssh-key-openstack-edpm-ipam\") pod \"1010f2d3-0d67-4d01-8e07-f95412d03443\" (UID: \"1010f2d3-0d67-4d01-8e07-f95412d03443\") " Feb 02 23:03:38 crc kubenswrapper[4755]: I0202 23:03:38.492499 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b4krf\" (UniqueName: \"kubernetes.io/projected/1010f2d3-0d67-4d01-8e07-f95412d03443-kube-api-access-b4krf\") pod \"1010f2d3-0d67-4d01-8e07-f95412d03443\" (UID: \"1010f2d3-0d67-4d01-8e07-f95412d03443\") " Feb 02 23:03:38 crc kubenswrapper[4755]: I0202 23:03:38.492550 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1010f2d3-0d67-4d01-8e07-f95412d03443-inventory\") pod \"1010f2d3-0d67-4d01-8e07-f95412d03443\" (UID: \"1010f2d3-0d67-4d01-8e07-f95412d03443\") " Feb 02 23:03:38 crc kubenswrapper[4755]: I0202 23:03:38.497914 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1010f2d3-0d67-4d01-8e07-f95412d03443-kube-api-access-b4krf" (OuterVolumeSpecName: "kube-api-access-b4krf") pod "1010f2d3-0d67-4d01-8e07-f95412d03443" (UID: "1010f2d3-0d67-4d01-8e07-f95412d03443"). InnerVolumeSpecName "kube-api-access-b4krf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:03:38 crc kubenswrapper[4755]: I0202 23:03:38.525430 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1010f2d3-0d67-4d01-8e07-f95412d03443-inventory" (OuterVolumeSpecName: "inventory") pod "1010f2d3-0d67-4d01-8e07-f95412d03443" (UID: "1010f2d3-0d67-4d01-8e07-f95412d03443"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:03:38 crc kubenswrapper[4755]: I0202 23:03:38.545896 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1010f2d3-0d67-4d01-8e07-f95412d03443-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "1010f2d3-0d67-4d01-8e07-f95412d03443" (UID: "1010f2d3-0d67-4d01-8e07-f95412d03443"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:03:38 crc kubenswrapper[4755]: I0202 23:03:38.595043 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b4krf\" (UniqueName: \"kubernetes.io/projected/1010f2d3-0d67-4d01-8e07-f95412d03443-kube-api-access-b4krf\") on node \"crc\" DevicePath \"\"" Feb 02 23:03:38 crc kubenswrapper[4755]: I0202 23:03:38.595089 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1010f2d3-0d67-4d01-8e07-f95412d03443-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 23:03:38 crc kubenswrapper[4755]: I0202 23:03:38.595109 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1010f2d3-0d67-4d01-8e07-f95412d03443-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 23:03:38 crc kubenswrapper[4755]: I0202 23:03:38.833758 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h4969" event={"ID":"1010f2d3-0d67-4d01-8e07-f95412d03443","Type":"ContainerDied","Data":"606916193d9832533f93535417cc28f40f542655c94b27b271c247851ef1aded"} Feb 02 23:03:38 crc kubenswrapper[4755]: I0202 23:03:38.833809 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="606916193d9832533f93535417cc28f40f542655c94b27b271c247851ef1aded" Feb 02 23:03:38 crc kubenswrapper[4755]: I0202 23:03:38.833835 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-h4969" Feb 02 23:03:38 crc kubenswrapper[4755]: I0202 23:03:38.936401 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4"] Feb 02 23:03:38 crc kubenswrapper[4755]: E0202 23:03:38.936891 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1010f2d3-0d67-4d01-8e07-f95412d03443" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Feb 02 23:03:38 crc kubenswrapper[4755]: I0202 23:03:38.936908 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="1010f2d3-0d67-4d01-8e07-f95412d03443" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Feb 02 23:03:38 crc kubenswrapper[4755]: I0202 23:03:38.937131 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="1010f2d3-0d67-4d01-8e07-f95412d03443" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Feb 02 23:03:38 crc kubenswrapper[4755]: I0202 23:03:38.937998 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4" Feb 02 23:03:38 crc kubenswrapper[4755]: I0202 23:03:38.940058 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 23:03:38 crc kubenswrapper[4755]: I0202 23:03:38.940068 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 23:03:38 crc kubenswrapper[4755]: I0202 23:03:38.940068 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 23:03:38 crc kubenswrapper[4755]: I0202 23:03:38.940369 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-n24hl" Feb 02 23:03:38 crc kubenswrapper[4755]: I0202 23:03:38.965426 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4"] Feb 02 23:03:39 crc kubenswrapper[4755]: I0202 23:03:39.105039 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b68ca425-9be7-41e0-b1c3-5566cb559d71-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4\" (UID: \"b68ca425-9be7-41e0-b1c3-5566cb559d71\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4" Feb 02 23:03:39 crc kubenswrapper[4755]: I0202 23:03:39.105135 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2s5t2\" (UniqueName: \"kubernetes.io/projected/b68ca425-9be7-41e0-b1c3-5566cb559d71-kube-api-access-2s5t2\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4\" (UID: \"b68ca425-9be7-41e0-b1c3-5566cb559d71\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4" Feb 02 23:03:39 crc kubenswrapper[4755]: I0202 23:03:39.105183 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b68ca425-9be7-41e0-b1c3-5566cb559d71-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4\" (UID: \"b68ca425-9be7-41e0-b1c3-5566cb559d71\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4" Feb 02 23:03:39 crc kubenswrapper[4755]: I0202 23:03:39.207289 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b68ca425-9be7-41e0-b1c3-5566cb559d71-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4\" (UID: \"b68ca425-9be7-41e0-b1c3-5566cb559d71\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4" Feb 02 23:03:39 crc kubenswrapper[4755]: I0202 23:03:39.207404 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2s5t2\" (UniqueName: \"kubernetes.io/projected/b68ca425-9be7-41e0-b1c3-5566cb559d71-kube-api-access-2s5t2\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4\" (UID: \"b68ca425-9be7-41e0-b1c3-5566cb559d71\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4" Feb 02 23:03:39 crc kubenswrapper[4755]: I0202 23:03:39.207455 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: 
\"kubernetes.io/secret/b68ca425-9be7-41e0-b1c3-5566cb559d71-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4\" (UID: \"b68ca425-9be7-41e0-b1c3-5566cb559d71\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4" Feb 02 23:03:39 crc kubenswrapper[4755]: I0202 23:03:39.215259 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b68ca425-9be7-41e0-b1c3-5566cb559d71-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4\" (UID: \"b68ca425-9be7-41e0-b1c3-5566cb559d71\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4" Feb 02 23:03:39 crc kubenswrapper[4755]: I0202 23:03:39.220121 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b68ca425-9be7-41e0-b1c3-5566cb559d71-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4\" (UID: \"b68ca425-9be7-41e0-b1c3-5566cb559d71\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4" Feb 02 23:03:39 crc kubenswrapper[4755]: I0202 23:03:39.230577 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2s5t2\" (UniqueName: \"kubernetes.io/projected/b68ca425-9be7-41e0-b1c3-5566cb559d71-kube-api-access-2s5t2\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4\" (UID: \"b68ca425-9be7-41e0-b1c3-5566cb559d71\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4" Feb 02 23:03:39 crc kubenswrapper[4755]: I0202 23:03:39.259885 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4" Feb 02 23:03:39 crc kubenswrapper[4755]: I0202 23:03:39.855197 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4"] Feb 02 23:03:40 crc kubenswrapper[4755]: I0202 23:03:40.852773 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4" event={"ID":"b68ca425-9be7-41e0-b1c3-5566cb559d71","Type":"ContainerStarted","Data":"b84e734dd7d53186d028570e38bcc92b02c1efb10141ef492e902e745e54f1d5"} Feb 02 23:03:40 crc kubenswrapper[4755]: I0202 23:03:40.854496 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4" event={"ID":"b68ca425-9be7-41e0-b1c3-5566cb559d71","Type":"ContainerStarted","Data":"981c84082a729dc384e15491b2040f54c252d434ea993306b548d38c1ed48d15"} Feb 02 23:03:40 crc kubenswrapper[4755]: I0202 23:03:40.884420 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4" podStartSLOduration=2.341904247 podStartE2EDuration="2.884398348s" podCreationTimestamp="2026-02-02 23:03:38 +0000 UTC" firstStartedPulling="2026-02-02 23:03:39.864574745 +0000 UTC m=+1775.555795071" lastFinishedPulling="2026-02-02 23:03:40.407068836 +0000 UTC m=+1776.098289172" observedRunningTime="2026-02-02 23:03:40.874939493 +0000 UTC m=+1776.566159819" watchObservedRunningTime="2026-02-02 23:03:40.884398348 +0000 UTC m=+1776.575618674" Feb 02 23:03:50 crc kubenswrapper[4755]: I0202 23:03:50.068774 4755 scope.go:117] "RemoveContainer" 
containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" Feb 02 23:03:50 crc kubenswrapper[4755]: E0202 23:03:50.069813 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:03:57 crc kubenswrapper[4755]: I0202 23:03:57.068849 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-2vwzg"] Feb 02 23:03:57 crc kubenswrapper[4755]: I0202 23:03:57.094419 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-2vwzg"] Feb 02 23:03:59 crc kubenswrapper[4755]: I0202 23:03:59.080145 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45b91834-5ada-4402-85c4-df681a85c076" path="/var/lib/kubelet/pods/45b91834-5ada-4402-85c4-df681a85c076/volumes" Feb 02 23:04:02 crc kubenswrapper[4755]: I0202 23:04:02.069631 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" Feb 02 23:04:02 crc kubenswrapper[4755]: E0202 23:04:02.070765 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:04:09 crc kubenswrapper[4755]: I0202 23:04:09.567833 4755 scope.go:117] "RemoveContainer" containerID="cbe5dd77be314a6e9d29b7d1de91006f88ae3f2b97d528bda6e8ea07549aaaa9" Feb 02 23:04:09 crc kubenswrapper[4755]: I0202 23:04:09.613783 4755 scope.go:117] "RemoveContainer" containerID="7aabbf2e1f23dd6de8aa371f3ec6a15a6ef91c01c78a3530b65aac3e04fb72d7" Feb 02 23:04:09 crc kubenswrapper[4755]: I0202 23:04:09.681867 4755 scope.go:117] "RemoveContainer" containerID="d092ca52d36f228841cd93f4785f52915e51a33c8cd3f9895ea67c0cf3520843" Feb 02 23:04:09 crc kubenswrapper[4755]: I0202 23:04:09.753392 4755 scope.go:117] "RemoveContainer" containerID="3fc18d501e2d47e033382102a5b3a7e985c17be2b7bc804f2ddbb27a543708ee" Feb 02 23:04:09 crc kubenswrapper[4755]: I0202 23:04:09.808199 4755 scope.go:117] "RemoveContainer" containerID="1720f86c20eca7c8b729f20ca7a07c4f32522ff9478534f117fa8228cc7d58a1" Feb 02 23:04:09 crc kubenswrapper[4755]: I0202 23:04:09.863460 4755 scope.go:117] "RemoveContainer" containerID="7b189a7fb96caf3325c5394a26afe2bf80760acdd0bff5a3f2a7ad16a5e23074" Feb 02 23:04:09 crc kubenswrapper[4755]: I0202 23:04:09.897216 4755 scope.go:117] "RemoveContainer" containerID="860e0b0b085a0b082e86f1391c72d6a9122cb59891614f23987abcefce7dc5e8" Feb 02 23:04:09 crc kubenswrapper[4755]: I0202 23:04:09.947336 4755 scope.go:117] "RemoveContainer" containerID="6c1233e6a1a40b69e2fbca5939af32b41351ad71476088a045c58131c4d0500d" Feb 02 23:04:09 crc kubenswrapper[4755]: I0202 23:04:09.973047 4755 scope.go:117] "RemoveContainer" containerID="04973ba3e744473c95923eba257bd6d357aa2bdaf2b0ed653785bddc20928783" Feb 02 23:04:10 crc kubenswrapper[4755]: I0202 23:04:10.003282 4755 scope.go:117] "RemoveContainer" 
containerID="2e512cc897c06d8cdd94d0c3021b290bde7f9250a0d58d86a9b882dc4178d456" Feb 02 23:04:10 crc kubenswrapper[4755]: I0202 23:04:10.066573 4755 scope.go:117] "RemoveContainer" containerID="94f0599d9e1c444580ae409efc65d62f2f4e4e60c54210f2d95cd84bd7eec41b" Feb 02 23:04:10 crc kubenswrapper[4755]: I0202 23:04:10.102363 4755 scope.go:117] "RemoveContainer" containerID="3d16d5af66ed73c47119bff4be610c1f40ad459c5adc302a3f2e9e788bc47598" Feb 02 23:04:10 crc kubenswrapper[4755]: I0202 23:04:10.134814 4755 scope.go:117] "RemoveContainer" containerID="799487a328697fc12fc93257bc5606057c3a99dea78b6ba2ea7403f33c04099c" Feb 02 23:04:10 crc kubenswrapper[4755]: I0202 23:04:10.170584 4755 scope.go:117] "RemoveContainer" containerID="f8d6ac97ab0fb1c0cd200baf81a8b1624050135da7f88fdfe94d99fc391d9a19" Feb 02 23:04:12 crc kubenswrapper[4755]: I0202 23:04:12.058703 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-sk4gl"] Feb 02 23:04:12 crc kubenswrapper[4755]: I0202 23:04:12.073031 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-sk4gl"] Feb 02 23:04:13 crc kubenswrapper[4755]: I0202 23:04:13.053713 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-55dvt"] Feb 02 23:04:13 crc kubenswrapper[4755]: I0202 23:04:13.063549 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-55dvt"] Feb 02 23:04:13 crc kubenswrapper[4755]: I0202 23:04:13.070111 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" Feb 02 23:04:13 crc kubenswrapper[4755]: E0202 23:04:13.070474 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:04:13 crc kubenswrapper[4755]: I0202 23:04:13.081529 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27d7e0ea-4abd-4afe-be9b-460fbfea81c7" path="/var/lib/kubelet/pods/27d7e0ea-4abd-4afe-be9b-460fbfea81c7/volumes" Feb 02 23:04:13 crc kubenswrapper[4755]: I0202 23:04:13.082325 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f55b2a0-624c-46b1-bede-8cb15264838e" path="/var/lib/kubelet/pods/4f55b2a0-624c-46b1-bede-8cb15264838e/volumes" Feb 02 23:04:18 crc kubenswrapper[4755]: I0202 23:04:18.039935 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-xwr7s"] Feb 02 23:04:18 crc kubenswrapper[4755]: I0202 23:04:18.051612 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-xwr7s"] Feb 02 23:04:19 crc kubenswrapper[4755]: I0202 23:04:19.085603 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda" path="/var/lib/kubelet/pods/e55a2cd8-108b-4aa0-8d97-4fe1b06d8dda/volumes" Feb 02 23:04:27 crc kubenswrapper[4755]: I0202 23:04:27.069274 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" Feb 02 23:04:27 crc kubenswrapper[4755]: E0202 23:04:27.070696 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:04:32 crc kubenswrapper[4755]: I0202 23:04:32.051010 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-pwclg"] Feb 02 23:04:32 crc kubenswrapper[4755]: I0202 23:04:32.062415 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-pwclg"] Feb 02 23:04:33 crc kubenswrapper[4755]: I0202 23:04:33.086261 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0403cc43-6199-4e95-b427-c4f268d8049a" path="/var/lib/kubelet/pods/0403cc43-6199-4e95-b427-c4f268d8049a/volumes" Feb 02 23:04:41 crc kubenswrapper[4755]: I0202 23:04:41.068678 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" Feb 02 23:04:41 crc kubenswrapper[4755]: E0202 23:04:41.069394 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:04:53 crc kubenswrapper[4755]: I0202 23:04:53.671379 4755 generic.go:334] "Generic (PLEG): container finished" podID="b68ca425-9be7-41e0-b1c3-5566cb559d71" containerID="b84e734dd7d53186d028570e38bcc92b02c1efb10141ef492e902e745e54f1d5" exitCode=0 Feb 02 23:04:53 crc kubenswrapper[4755]: I0202 23:04:53.671574 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4" event={"ID":"b68ca425-9be7-41e0-b1c3-5566cb559d71","Type":"ContainerDied","Data":"b84e734dd7d53186d028570e38bcc92b02c1efb10141ef492e902e745e54f1d5"} Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.180917 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4" Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.303832 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b68ca425-9be7-41e0-b1c3-5566cb559d71-inventory\") pod \"b68ca425-9be7-41e0-b1c3-5566cb559d71\" (UID: \"b68ca425-9be7-41e0-b1c3-5566cb559d71\") " Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.304221 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b68ca425-9be7-41e0-b1c3-5566cb559d71-ssh-key-openstack-edpm-ipam\") pod \"b68ca425-9be7-41e0-b1c3-5566cb559d71\" (UID: \"b68ca425-9be7-41e0-b1c3-5566cb559d71\") " Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.304250 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2s5t2\" (UniqueName: \"kubernetes.io/projected/b68ca425-9be7-41e0-b1c3-5566cb559d71-kube-api-access-2s5t2\") pod \"b68ca425-9be7-41e0-b1c3-5566cb559d71\" (UID: \"b68ca425-9be7-41e0-b1c3-5566cb559d71\") " Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.315078 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b68ca425-9be7-41e0-b1c3-5566cb559d71-kube-api-access-2s5t2" (OuterVolumeSpecName: "kube-api-access-2s5t2") pod "b68ca425-9be7-41e0-b1c3-5566cb559d71" (UID: "b68ca425-9be7-41e0-b1c3-5566cb559d71"). InnerVolumeSpecName "kube-api-access-2s5t2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.341223 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b68ca425-9be7-41e0-b1c3-5566cb559d71-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "b68ca425-9be7-41e0-b1c3-5566cb559d71" (UID: "b68ca425-9be7-41e0-b1c3-5566cb559d71"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.345639 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b68ca425-9be7-41e0-b1c3-5566cb559d71-inventory" (OuterVolumeSpecName: "inventory") pod "b68ca425-9be7-41e0-b1c3-5566cb559d71" (UID: "b68ca425-9be7-41e0-b1c3-5566cb559d71"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.407188 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b68ca425-9be7-41e0-b1c3-5566cb559d71-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.407228 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2s5t2\" (UniqueName: \"kubernetes.io/projected/b68ca425-9be7-41e0-b1c3-5566cb559d71-kube-api-access-2s5t2\") on node \"crc\" DevicePath \"\"" Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.407241 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b68ca425-9be7-41e0-b1c3-5566cb559d71-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.699493 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4" event={"ID":"b68ca425-9be7-41e0-b1c3-5566cb559d71","Type":"ContainerDied","Data":"981c84082a729dc384e15491b2040f54c252d434ea993306b548d38c1ed48d15"} Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.699891 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="981c84082a729dc384e15491b2040f54c252d434ea993306b548d38c1ed48d15" Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.700247 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4" Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.800339 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-f6v85"] Feb 02 23:04:55 crc kubenswrapper[4755]: E0202 23:04:55.800912 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b68ca425-9be7-41e0-b1c3-5566cb559d71" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.800937 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b68ca425-9be7-41e0-b1c3-5566cb559d71" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.801273 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="b68ca425-9be7-41e0-b1c3-5566cb559d71" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.802300 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-f6v85" Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.804316 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.804706 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.804935 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.805085 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-n24hl" Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.811213 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-f6v85"] Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.931091 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dw798\" (UniqueName: \"kubernetes.io/projected/ea9bda0f-9dcd-4175-940e-023f11ef0f44-kube-api-access-dw798\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-f6v85\" (UID: \"ea9bda0f-9dcd-4175-940e-023f11ef0f44\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-f6v85" Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.931177 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea9bda0f-9dcd-4175-940e-023f11ef0f44-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-f6v85\" (UID: \"ea9bda0f-9dcd-4175-940e-023f11ef0f44\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-f6v85" Feb 02 23:04:55 crc kubenswrapper[4755]: I0202 23:04:55.931518 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ea9bda0f-9dcd-4175-940e-023f11ef0f44-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-f6v85\" (UID: \"ea9bda0f-9dcd-4175-940e-023f11ef0f44\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-f6v85" Feb 02 23:04:56 crc kubenswrapper[4755]: I0202 23:04:56.034019 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dw798\" (UniqueName: \"kubernetes.io/projected/ea9bda0f-9dcd-4175-940e-023f11ef0f44-kube-api-access-dw798\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-f6v85\" (UID: \"ea9bda0f-9dcd-4175-940e-023f11ef0f44\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-f6v85" Feb 02 23:04:56 crc kubenswrapper[4755]: I0202 23:04:56.034193 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea9bda0f-9dcd-4175-940e-023f11ef0f44-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-f6v85\" (UID: \"ea9bda0f-9dcd-4175-940e-023f11ef0f44\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-f6v85" Feb 02 23:04:56 crc kubenswrapper[4755]: I0202 23:04:56.034659 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: 
\"kubernetes.io/secret/ea9bda0f-9dcd-4175-940e-023f11ef0f44-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-f6v85\" (UID: \"ea9bda0f-9dcd-4175-940e-023f11ef0f44\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-f6v85" Feb 02 23:04:56 crc kubenswrapper[4755]: I0202 23:04:56.043455 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ea9bda0f-9dcd-4175-940e-023f11ef0f44-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-f6v85\" (UID: \"ea9bda0f-9dcd-4175-940e-023f11ef0f44\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-f6v85" Feb 02 23:04:56 crc kubenswrapper[4755]: I0202 23:04:56.046524 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea9bda0f-9dcd-4175-940e-023f11ef0f44-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-f6v85\" (UID: \"ea9bda0f-9dcd-4175-940e-023f11ef0f44\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-f6v85" Feb 02 23:04:56 crc kubenswrapper[4755]: I0202 23:04:56.066625 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dw798\" (UniqueName: \"kubernetes.io/projected/ea9bda0f-9dcd-4175-940e-023f11ef0f44-kube-api-access-dw798\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-f6v85\" (UID: \"ea9bda0f-9dcd-4175-940e-023f11ef0f44\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-f6v85" Feb 02 23:04:56 crc kubenswrapper[4755]: I0202 23:04:56.068847 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" Feb 02 23:04:56 crc kubenswrapper[4755]: E0202 23:04:56.069272 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:04:56 crc kubenswrapper[4755]: I0202 23:04:56.131846 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-f6v85" Feb 02 23:04:56 crc kubenswrapper[4755]: I0202 23:04:56.678307 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-f6v85"] Feb 02 23:04:56 crc kubenswrapper[4755]: W0202 23:04:56.685033 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea9bda0f_9dcd_4175_940e_023f11ef0f44.slice/crio-b61a7581ee308b310a87f8665ddb8fb465f3ac86d0f4ec13da55c47283eb6b01 WatchSource:0}: Error finding container b61a7581ee308b310a87f8665ddb8fb465f3ac86d0f4ec13da55c47283eb6b01: Status 404 returned error can't find the container with id b61a7581ee308b310a87f8665ddb8fb465f3ac86d0f4ec13da55c47283eb6b01 Feb 02 23:04:56 crc kubenswrapper[4755]: I0202 23:04:56.710086 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-f6v85" event={"ID":"ea9bda0f-9dcd-4175-940e-023f11ef0f44","Type":"ContainerStarted","Data":"b61a7581ee308b310a87f8665ddb8fb465f3ac86d0f4ec13da55c47283eb6b01"} Feb 02 23:04:57 crc kubenswrapper[4755]: I0202 23:04:57.721076 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-f6v85" event={"ID":"ea9bda0f-9dcd-4175-940e-023f11ef0f44","Type":"ContainerStarted","Data":"7a13ad44914496596a72ff1428a68d507f0b62fba7b6c08ddfe439ac1b8b66f9"} Feb 02 23:04:57 crc kubenswrapper[4755]: I0202 23:04:57.756210 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-f6v85" podStartSLOduration=2.268487886 podStartE2EDuration="2.75619196s" podCreationTimestamp="2026-02-02 23:04:55 +0000 UTC" firstStartedPulling="2026-02-02 23:04:56.689171061 +0000 UTC m=+1852.380391427" lastFinishedPulling="2026-02-02 23:04:57.176875165 +0000 UTC m=+1852.868095501" observedRunningTime="2026-02-02 23:04:57.744853782 +0000 UTC m=+1853.436074138" watchObservedRunningTime="2026-02-02 23:04:57.75619196 +0000 UTC m=+1853.447412296" Feb 02 23:05:02 crc kubenswrapper[4755]: I0202 23:05:02.812944 4755 generic.go:334] "Generic (PLEG): container finished" podID="ea9bda0f-9dcd-4175-940e-023f11ef0f44" containerID="7a13ad44914496596a72ff1428a68d507f0b62fba7b6c08ddfe439ac1b8b66f9" exitCode=0 Feb 02 23:05:02 crc kubenswrapper[4755]: I0202 23:05:02.813047 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-f6v85" event={"ID":"ea9bda0f-9dcd-4175-940e-023f11ef0f44","Type":"ContainerDied","Data":"7a13ad44914496596a72ff1428a68d507f0b62fba7b6c08ddfe439ac1b8b66f9"} Feb 02 23:05:04 crc kubenswrapper[4755]: I0202 23:05:04.354797 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-f6v85" Feb 02 23:05:04 crc kubenswrapper[4755]: I0202 23:05:04.534571 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dw798\" (UniqueName: \"kubernetes.io/projected/ea9bda0f-9dcd-4175-940e-023f11ef0f44-kube-api-access-dw798\") pod \"ea9bda0f-9dcd-4175-940e-023f11ef0f44\" (UID: \"ea9bda0f-9dcd-4175-940e-023f11ef0f44\") " Feb 02 23:05:04 crc kubenswrapper[4755]: I0202 23:05:04.534713 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ea9bda0f-9dcd-4175-940e-023f11ef0f44-ssh-key-openstack-edpm-ipam\") pod \"ea9bda0f-9dcd-4175-940e-023f11ef0f44\" (UID: \"ea9bda0f-9dcd-4175-940e-023f11ef0f44\") " Feb 02 23:05:04 crc kubenswrapper[4755]: I0202 23:05:04.535040 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea9bda0f-9dcd-4175-940e-023f11ef0f44-inventory\") pod \"ea9bda0f-9dcd-4175-940e-023f11ef0f44\" (UID: \"ea9bda0f-9dcd-4175-940e-023f11ef0f44\") " Feb 02 23:05:04 crc kubenswrapper[4755]: I0202 23:05:04.539757 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea9bda0f-9dcd-4175-940e-023f11ef0f44-kube-api-access-dw798" (OuterVolumeSpecName: "kube-api-access-dw798") pod "ea9bda0f-9dcd-4175-940e-023f11ef0f44" (UID: "ea9bda0f-9dcd-4175-940e-023f11ef0f44"). InnerVolumeSpecName "kube-api-access-dw798". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:05:04 crc kubenswrapper[4755]: I0202 23:05:04.563216 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea9bda0f-9dcd-4175-940e-023f11ef0f44-inventory" (OuterVolumeSpecName: "inventory") pod "ea9bda0f-9dcd-4175-940e-023f11ef0f44" (UID: "ea9bda0f-9dcd-4175-940e-023f11ef0f44"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:05:04 crc kubenswrapper[4755]: I0202 23:05:04.586923 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea9bda0f-9dcd-4175-940e-023f11ef0f44-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "ea9bda0f-9dcd-4175-940e-023f11ef0f44" (UID: "ea9bda0f-9dcd-4175-940e-023f11ef0f44"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:05:04 crc kubenswrapper[4755]: I0202 23:05:04.637436 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea9bda0f-9dcd-4175-940e-023f11ef0f44-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 23:05:04 crc kubenswrapper[4755]: I0202 23:05:04.637468 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dw798\" (UniqueName: \"kubernetes.io/projected/ea9bda0f-9dcd-4175-940e-023f11ef0f44-kube-api-access-dw798\") on node \"crc\" DevicePath \"\"" Feb 02 23:05:04 crc kubenswrapper[4755]: I0202 23:05:04.637479 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ea9bda0f-9dcd-4175-940e-023f11ef0f44-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 23:05:04 crc kubenswrapper[4755]: I0202 23:05:04.847606 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-f6v85" event={"ID":"ea9bda0f-9dcd-4175-940e-023f11ef0f44","Type":"ContainerDied","Data":"b61a7581ee308b310a87f8665ddb8fb465f3ac86d0f4ec13da55c47283eb6b01"} Feb 02 23:05:04 crc kubenswrapper[4755]: I0202 23:05:04.847655 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b61a7581ee308b310a87f8665ddb8fb465f3ac86d0f4ec13da55c47283eb6b01" Feb 02 23:05:04 crc kubenswrapper[4755]: I0202 23:05:04.847719 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-f6v85" Feb 02 23:05:05 crc kubenswrapper[4755]: I0202 23:05:05.031653 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-szhnq"] Feb 02 23:05:05 crc kubenswrapper[4755]: E0202 23:05:05.032224 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea9bda0f-9dcd-4175-940e-023f11ef0f44" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Feb 02 23:05:05 crc kubenswrapper[4755]: I0202 23:05:05.032247 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea9bda0f-9dcd-4175-940e-023f11ef0f44" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Feb 02 23:05:05 crc kubenswrapper[4755]: I0202 23:05:05.032528 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea9bda0f-9dcd-4175-940e-023f11ef0f44" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Feb 02 23:05:05 crc kubenswrapper[4755]: I0202 23:05:05.033498 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szhnq" Feb 02 23:05:05 crc kubenswrapper[4755]: I0202 23:05:05.051578 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-szhnq"] Feb 02 23:05:05 crc kubenswrapper[4755]: I0202 23:05:05.058197 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 23:05:05 crc kubenswrapper[4755]: I0202 23:05:05.058474 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-n24hl" Feb 02 23:05:05 crc kubenswrapper[4755]: I0202 23:05:05.058630 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 23:05:05 crc kubenswrapper[4755]: I0202 23:05:05.058793 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 23:05:05 crc kubenswrapper[4755]: I0202 23:05:05.150157 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-szhnq\" (UID: \"3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szhnq" Feb 02 23:05:05 crc kubenswrapper[4755]: I0202 23:05:05.150249 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc-ssh-key-openstack-edpm-ipam\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-szhnq\" (UID: \"3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szhnq" Feb 02 23:05:05 crc kubenswrapper[4755]: I0202 23:05:05.150528 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sr9n5\" (UniqueName: \"kubernetes.io/projected/3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc-kube-api-access-sr9n5\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-szhnq\" (UID: \"3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szhnq" Feb 02 23:05:05 crc kubenswrapper[4755]: I0202 23:05:05.252814 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-szhnq\" (UID: \"3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szhnq" Feb 02 23:05:05 crc kubenswrapper[4755]: I0202 23:05:05.252909 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc-ssh-key-openstack-edpm-ipam\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-szhnq\" (UID: \"3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szhnq" Feb 02 23:05:05 crc kubenswrapper[4755]: I0202 23:05:05.253014 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sr9n5\" (UniqueName: \"kubernetes.io/projected/3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc-kube-api-access-sr9n5\") pod 
\"install-os-edpm-deployment-openstack-edpm-ipam-szhnq\" (UID: \"3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szhnq" Feb 02 23:05:05 crc kubenswrapper[4755]: I0202 23:05:05.257203 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc-ssh-key-openstack-edpm-ipam\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-szhnq\" (UID: \"3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szhnq" Feb 02 23:05:05 crc kubenswrapper[4755]: I0202 23:05:05.270626 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-szhnq\" (UID: \"3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szhnq" Feb 02 23:05:05 crc kubenswrapper[4755]: I0202 23:05:05.274274 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sr9n5\" (UniqueName: \"kubernetes.io/projected/3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc-kube-api-access-sr9n5\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-szhnq\" (UID: \"3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szhnq" Feb 02 23:05:05 crc kubenswrapper[4755]: I0202 23:05:05.380255 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szhnq" Feb 02 23:05:06 crc kubenswrapper[4755]: I0202 23:05:06.050597 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-szhnq"] Feb 02 23:05:06 crc kubenswrapper[4755]: I0202 23:05:06.892130 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szhnq" event={"ID":"3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc","Type":"ContainerStarted","Data":"f4c97d0d3f7eeb8cafbf76bc8ba8d07d5acace3968ca3a9f6386db3b0899fb19"} Feb 02 23:05:06 crc kubenswrapper[4755]: I0202 23:05:06.892492 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szhnq" event={"ID":"3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc","Type":"ContainerStarted","Data":"67157957ba63a60b850683e4581639eee0855cdba66bdc873a620330de946e8f"} Feb 02 23:05:06 crc kubenswrapper[4755]: I0202 23:05:06.909011 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szhnq" podStartSLOduration=2.466521086 podStartE2EDuration="2.908981909s" podCreationTimestamp="2026-02-02 23:05:04 +0000 UTC" firstStartedPulling="2026-02-02 23:05:06.094941544 +0000 UTC m=+1861.786161870" lastFinishedPulling="2026-02-02 23:05:06.537402357 +0000 UTC m=+1862.228622693" observedRunningTime="2026-02-02 23:05:06.905030088 +0000 UTC m=+1862.596250424" watchObservedRunningTime="2026-02-02 23:05:06.908981909 +0000 UTC m=+1862.600202235" Feb 02 23:05:07 crc kubenswrapper[4755]: I0202 23:05:07.068850 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" Feb 02 23:05:07 crc kubenswrapper[4755]: E0202 23:05:07.069151 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:05:10 crc kubenswrapper[4755]: I0202 23:05:10.518474 4755 scope.go:117] "RemoveContainer" containerID="16855b8ef35a76bebbf6699613df39982b91b3371c8f70a65ccc74159a20b7f4" Feb 02 23:05:10 crc kubenswrapper[4755]: I0202 23:05:10.585690 4755 scope.go:117] "RemoveContainer" containerID="93fab4df7bcf4224d2cfaa86fd176eddac2020cd566c469af102de4402e8778f" Feb 02 23:05:10 crc kubenswrapper[4755]: I0202 23:05:10.657548 4755 scope.go:117] "RemoveContainer" containerID="b96da00dacfa0456a5c7b3629e2126674aac3e023dfa1a6921d3fd3b76bae1db" Feb 02 23:05:10 crc kubenswrapper[4755]: I0202 23:05:10.701556 4755 scope.go:117] "RemoveContainer" containerID="9115f92997d485b695ce9cd0af538bbda2028bf538d2e9d879f856d82c88c465" Feb 02 23:05:18 crc kubenswrapper[4755]: I0202 23:05:18.056117 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-jwwfl"] Feb 02 23:05:18 crc kubenswrapper[4755]: I0202 23:05:18.071388 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-f9e8-account-create-update-p7swg"] Feb 02 23:05:18 crc kubenswrapper[4755]: I0202 23:05:18.084341 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-f9e8-account-create-update-p7swg"] Feb 02 23:05:18 crc kubenswrapper[4755]: I0202 23:05:18.099906 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-jwwfl"] Feb 02 23:05:19 crc kubenswrapper[4755]: I0202 23:05:19.105708 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29a63582-71ab-4de9-a5ab-ca97b11b0a73" path="/var/lib/kubelet/pods/29a63582-71ab-4de9-a5ab-ca97b11b0a73/volumes" Feb 02 23:05:19 crc kubenswrapper[4755]: I0202 23:05:19.106402 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0cbc632-66aa-4301-952c-a59fcbd3e884" path="/var/lib/kubelet/pods/c0cbc632-66aa-4301-952c-a59fcbd3e884/volumes" Feb 02 23:05:19 crc kubenswrapper[4755]: I0202 23:05:19.107150 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-3aa0-account-create-update-5fhf8"] Feb 02 23:05:19 crc kubenswrapper[4755]: I0202 23:05:19.107196 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-wdj6p"] Feb 02 23:05:19 crc kubenswrapper[4755]: I0202 23:05:19.111673 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-zzz84"] Feb 02 23:05:19 crc kubenswrapper[4755]: I0202 23:05:19.119850 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-3aa0-account-create-update-5fhf8"] Feb 02 23:05:19 crc kubenswrapper[4755]: I0202 23:05:19.127521 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-wdj6p"] Feb 02 23:05:19 crc kubenswrapper[4755]: I0202 23:05:19.134927 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-zzz84"] Feb 02 23:05:19 crc kubenswrapper[4755]: I0202 23:05:19.142629 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-6bd1-account-create-update-7s7bt"] Feb 02 23:05:19 crc kubenswrapper[4755]: I0202 23:05:19.150360 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/nova-api-6bd1-account-create-update-7s7bt"] Feb 02 23:05:20 crc kubenswrapper[4755]: I0202 23:05:20.069909 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" Feb 02 23:05:20 crc kubenswrapper[4755]: E0202 23:05:20.070534 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:05:21 crc kubenswrapper[4755]: I0202 23:05:21.089593 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f3d167f-44ce-4d19-a53c-b9d370837a3d" path="/var/lib/kubelet/pods/1f3d167f-44ce-4d19-a53c-b9d370837a3d/volumes" Feb 02 23:05:21 crc kubenswrapper[4755]: I0202 23:05:21.090348 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fcb6639-8e5c-41c3-8b9c-ce398dffaba8" path="/var/lib/kubelet/pods/3fcb6639-8e5c-41c3-8b9c-ce398dffaba8/volumes" Feb 02 23:05:21 crc kubenswrapper[4755]: I0202 23:05:21.091037 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b957bd8-a821-475c-9316-46a0e81487a8" path="/var/lib/kubelet/pods/5b957bd8-a821-475c-9316-46a0e81487a8/volumes" Feb 02 23:05:21 crc kubenswrapper[4755]: I0202 23:05:21.091711 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8" path="/var/lib/kubelet/pods/9fdcaf90-1e56-4e33-9a2b-ba9f2655d2c8/volumes" Feb 02 23:05:35 crc kubenswrapper[4755]: I0202 23:05:35.079352 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63" Feb 02 23:05:36 crc kubenswrapper[4755]: I0202 23:05:36.257107 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerStarted","Data":"30bad18ffb9e582a32f758848770c077d9c7b99e4ba3e8c0b3b77128686a49e6"} Feb 02 23:05:44 crc kubenswrapper[4755]: I0202 23:05:44.354161 4755 generic.go:334] "Generic (PLEG): container finished" podID="3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc" containerID="f4c97d0d3f7eeb8cafbf76bc8ba8d07d5acace3968ca3a9f6386db3b0899fb19" exitCode=0 Feb 02 23:05:44 crc kubenswrapper[4755]: I0202 23:05:44.354229 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szhnq" event={"ID":"3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc","Type":"ContainerDied","Data":"f4c97d0d3f7eeb8cafbf76bc8ba8d07d5acace3968ca3a9f6386db3b0899fb19"} Feb 02 23:05:45 crc kubenswrapper[4755]: I0202 23:05:45.830140 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szhnq" Feb 02 23:05:45 crc kubenswrapper[4755]: I0202 23:05:45.944775 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sr9n5\" (UniqueName: \"kubernetes.io/projected/3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc-kube-api-access-sr9n5\") pod \"3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc\" (UID: \"3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc\") " Feb 02 23:05:45 crc kubenswrapper[4755]: I0202 23:05:45.945100 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc-ssh-key-openstack-edpm-ipam\") pod \"3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc\" (UID: \"3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc\") " Feb 02 23:05:45 crc kubenswrapper[4755]: I0202 23:05:45.945479 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc-inventory\") pod \"3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc\" (UID: \"3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc\") " Feb 02 23:05:45 crc kubenswrapper[4755]: I0202 23:05:45.953904 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc-kube-api-access-sr9n5" (OuterVolumeSpecName: "kube-api-access-sr9n5") pod "3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc" (UID: "3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc"). InnerVolumeSpecName "kube-api-access-sr9n5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:05:45 crc kubenswrapper[4755]: I0202 23:05:45.989270 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc-inventory" (OuterVolumeSpecName: "inventory") pod "3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc" (UID: "3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.001903 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc" (UID: "3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.051387 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sr9n5\" (UniqueName: \"kubernetes.io/projected/3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc-kube-api-access-sr9n5\") on node \"crc\" DevicePath \"\"" Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.051440 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.051461 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.082712 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jgvwd"] Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.096398 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jgvwd"] Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.378604 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szhnq" event={"ID":"3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc","Type":"ContainerDied","Data":"67157957ba63a60b850683e4581639eee0855cdba66bdc873a620330de946e8f"} Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.378861 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="67157957ba63a60b850683e4581639eee0855cdba66bdc873a620330de946e8f" Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.378677 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-szhnq" Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.468843 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq"] Feb 02 23:05:46 crc kubenswrapper[4755]: E0202 23:05:46.469274 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.469292 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.469515 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.470235 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq" Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.474080 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.474264 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.474370 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-n24hl" Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.474530 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.479582 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq"] Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.663271 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/21f9cef1-eabf-447e-abbb-a8c7d5627994-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq\" (UID: \"21f9cef1-eabf-447e-abbb-a8c7d5627994\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq" Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.663371 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwjx6\" (UniqueName: \"kubernetes.io/projected/21f9cef1-eabf-447e-abbb-a8c7d5627994-kube-api-access-qwjx6\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq\" (UID: \"21f9cef1-eabf-447e-abbb-a8c7d5627994\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq" Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.663456 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21f9cef1-eabf-447e-abbb-a8c7d5627994-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq\" (UID: \"21f9cef1-eabf-447e-abbb-a8c7d5627994\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq" Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.766128 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwjx6\" (UniqueName: \"kubernetes.io/projected/21f9cef1-eabf-447e-abbb-a8c7d5627994-kube-api-access-qwjx6\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq\" (UID: \"21f9cef1-eabf-447e-abbb-a8c7d5627994\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq" Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.766399 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21f9cef1-eabf-447e-abbb-a8c7d5627994-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq\" (UID: \"21f9cef1-eabf-447e-abbb-a8c7d5627994\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq" Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.766580 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: 
\"kubernetes.io/secret/21f9cef1-eabf-447e-abbb-a8c7d5627994-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq\" (UID: \"21f9cef1-eabf-447e-abbb-a8c7d5627994\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq" Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.772612 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/21f9cef1-eabf-447e-abbb-a8c7d5627994-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq\" (UID: \"21f9cef1-eabf-447e-abbb-a8c7d5627994\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq" Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.773566 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21f9cef1-eabf-447e-abbb-a8c7d5627994-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq\" (UID: \"21f9cef1-eabf-447e-abbb-a8c7d5627994\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq" Feb 02 23:05:46 crc kubenswrapper[4755]: I0202 23:05:46.799357 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwjx6\" (UniqueName: \"kubernetes.io/projected/21f9cef1-eabf-447e-abbb-a8c7d5627994-kube-api-access-qwjx6\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq\" (UID: \"21f9cef1-eabf-447e-abbb-a8c7d5627994\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq" Feb 02 23:05:47 crc kubenswrapper[4755]: I0202 23:05:47.090476 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afe1508f-09cc-4874-b8b4-560d879e2e49" path="/var/lib/kubelet/pods/afe1508f-09cc-4874-b8b4-560d879e2e49/volumes" Feb 02 23:05:47 crc kubenswrapper[4755]: I0202 23:05:47.093903 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq" Feb 02 23:05:47 crc kubenswrapper[4755]: W0202 23:05:47.696345 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod21f9cef1_eabf_447e_abbb_a8c7d5627994.slice/crio-15908fb494980253105a1432949795856bb9ceb9f56e626868f6fb4015773389 WatchSource:0}: Error finding container 15908fb494980253105a1432949795856bb9ceb9f56e626868f6fb4015773389: Status 404 returned error can't find the container with id 15908fb494980253105a1432949795856bb9ceb9f56e626868f6fb4015773389 Feb 02 23:05:47 crc kubenswrapper[4755]: I0202 23:05:47.697521 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq"] Feb 02 23:05:48 crc kubenswrapper[4755]: I0202 23:05:48.402636 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq" event={"ID":"21f9cef1-eabf-447e-abbb-a8c7d5627994","Type":"ContainerStarted","Data":"15908fb494980253105a1432949795856bb9ceb9f56e626868f6fb4015773389"} Feb 02 23:05:49 crc kubenswrapper[4755]: I0202 23:05:49.417507 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq" event={"ID":"21f9cef1-eabf-447e-abbb-a8c7d5627994","Type":"ContainerStarted","Data":"a49e47005d10bca97f1e4f1419a93250d22e29eddd15ad0e8c6b0609d57fa146"} Feb 02 23:05:49 crc kubenswrapper[4755]: I0202 23:05:49.470948 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq" podStartSLOduration=3.024957385 podStartE2EDuration="3.470914057s" podCreationTimestamp="2026-02-02 23:05:46 +0000 UTC" firstStartedPulling="2026-02-02 23:05:47.699595663 +0000 UTC m=+1903.390815989" lastFinishedPulling="2026-02-02 23:05:48.145552325 +0000 UTC m=+1903.836772661" observedRunningTime="2026-02-02 23:05:49.447350485 +0000 UTC m=+1905.138570841" watchObservedRunningTime="2026-02-02 23:05:49.470914057 +0000 UTC m=+1905.162134423" Feb 02 23:06:05 crc kubenswrapper[4755]: I0202 23:06:05.043053 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-8cxf8"] Feb 02 23:06:05 crc kubenswrapper[4755]: I0202 23:06:05.054303 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-8cxf8"] Feb 02 23:06:05 crc kubenswrapper[4755]: I0202 23:06:05.101791 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98b3b96f-3477-4af7-b598-a1d45e6ffbc6" path="/var/lib/kubelet/pods/98b3b96f-3477-4af7-b598-a1d45e6ffbc6/volumes" Feb 02 23:06:07 crc kubenswrapper[4755]: I0202 23:06:07.038565 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-rnblj"] Feb 02 23:06:07 crc kubenswrapper[4755]: I0202 23:06:07.050539 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-rnblj"] Feb 02 23:06:07 crc kubenswrapper[4755]: I0202 23:06:07.085175 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2940cf3-bd51-4393-8e05-6af05f9131ad" path="/var/lib/kubelet/pods/a2940cf3-bd51-4393-8e05-6af05f9131ad/volumes" Feb 02 23:06:10 crc kubenswrapper[4755]: I0202 23:06:10.855357 4755 scope.go:117] "RemoveContainer" containerID="2360ddbfc06549d7f0a9b865f800a78fc532f116e0c42aa4dad699141a4387ec" Feb 02 23:06:10 crc kubenswrapper[4755]: 
I0202 23:06:10.898597 4755 scope.go:117] "RemoveContainer" containerID="25751954eea5109293ea8bcedc5cab316b0a1cdaf0f5b1e357e5e3dd3796d9ee" Feb 02 23:06:10 crc kubenswrapper[4755]: I0202 23:06:10.939125 4755 scope.go:117] "RemoveContainer" containerID="96b3b4f82e7155a2511283cdad4ad6b98845ccd5cd92ea505830eadbb353ec72" Feb 02 23:06:10 crc kubenswrapper[4755]: I0202 23:06:10.991258 4755 scope.go:117] "RemoveContainer" containerID="2a58ff7ea96df1ef5d3125c206fadc47431b673998f7f23ce7f6c519928b6504" Feb 02 23:06:11 crc kubenswrapper[4755]: I0202 23:06:11.060688 4755 scope.go:117] "RemoveContainer" containerID="34ef74b8a9e586cb0ca95b126934071193f5b9a8aac551a0622699b5f0760bab" Feb 02 23:06:11 crc kubenswrapper[4755]: I0202 23:06:11.106539 4755 scope.go:117] "RemoveContainer" containerID="779ec7c3d27a9ddd6a055ed372ce6dcca6fe056357b54ffcb54ffb237d193cea" Feb 02 23:06:11 crc kubenswrapper[4755]: I0202 23:06:11.169605 4755 scope.go:117] "RemoveContainer" containerID="eacabd414151b6051e6ad6dec8e77721fa956b24657208b3128f6511dd7e14c7" Feb 02 23:06:11 crc kubenswrapper[4755]: I0202 23:06:11.201200 4755 scope.go:117] "RemoveContainer" containerID="a478a38b0d155268bdef66870aa039cd21d67f1464e4b71b70065ff6e0edbea3" Feb 02 23:06:11 crc kubenswrapper[4755]: I0202 23:06:11.223112 4755 scope.go:117] "RemoveContainer" containerID="27b22a6641cce1d2838824643dbf5e80c6fad787fa7acc3bd3a4b01aecee1e92" Feb 02 23:06:39 crc kubenswrapper[4755]: I0202 23:06:39.011423 4755 generic.go:334] "Generic (PLEG): container finished" podID="21f9cef1-eabf-447e-abbb-a8c7d5627994" containerID="a49e47005d10bca97f1e4f1419a93250d22e29eddd15ad0e8c6b0609d57fa146" exitCode=0 Feb 02 23:06:39 crc kubenswrapper[4755]: I0202 23:06:39.011523 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq" event={"ID":"21f9cef1-eabf-447e-abbb-a8c7d5627994","Type":"ContainerDied","Data":"a49e47005d10bca97f1e4f1419a93250d22e29eddd15ad0e8c6b0609d57fa146"} Feb 02 23:06:40 crc kubenswrapper[4755]: I0202 23:06:40.550475 4755 util.go:48] "No ready sandbox for pod can be found. 
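[editor's note] The bursts of `scope.go:117 "RemoveContainer"` entries (for example 23:06:10–23:06:11 above, right after the nova-* job pods were REMOVEd) are container garbage collection: dead containers belonging to deleted pods are pruned, and otherwise only a limited number of dead instances are retained per container. A rough sketch of that policy; the retention limit of 1 matches the kubelet's default for --maximum-dead-containers-per-container, but the types and function here are illustrative, not kubelet code:

```go
package main

import "fmt"

// maxPerContainer is the assumed retention limit for dead instances of a
// still-existing container (kubelet default: 1).
const maxPerContainer = 1

type deadContainer struct {
	id      string
	podGone bool // the owning pod was REMOVEd from the API
	ordinal int  // 0 = most recent dead instance for its container
}

// garbageCollect removes dead containers of deleted pods, plus any dead
// instance beyond the per-container retention limit.
func garbageCollect(candidates []deadContainer) {
	for _, c := range candidates {
		if c.podGone || c.ordinal >= maxPerContainer {
			fmt.Printf("\"RemoveContainer\" containerID=%q\n", c.id)
		}
	}
}

func main() {
	garbageCollect([]deadContainer{
		{id: "2360ddbfc06549d7f0a9b865f800a78fc532f116e0c42aa4dad699141a4387ec", podGone: true},
		{id: "25751954eea5109293ea8bcedc5cab316b0a1cdaf0f5b1e357e5e3dd3796d9ee", podGone: true},
	})
}
```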
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq" Feb 02 23:06:40 crc kubenswrapper[4755]: I0202 23:06:40.615055 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21f9cef1-eabf-447e-abbb-a8c7d5627994-inventory\") pod \"21f9cef1-eabf-447e-abbb-a8c7d5627994\" (UID: \"21f9cef1-eabf-447e-abbb-a8c7d5627994\") " Feb 02 23:06:40 crc kubenswrapper[4755]: I0202 23:06:40.615185 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qwjx6\" (UniqueName: \"kubernetes.io/projected/21f9cef1-eabf-447e-abbb-a8c7d5627994-kube-api-access-qwjx6\") pod \"21f9cef1-eabf-447e-abbb-a8c7d5627994\" (UID: \"21f9cef1-eabf-447e-abbb-a8c7d5627994\") " Feb 02 23:06:40 crc kubenswrapper[4755]: I0202 23:06:40.615358 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/21f9cef1-eabf-447e-abbb-a8c7d5627994-ssh-key-openstack-edpm-ipam\") pod \"21f9cef1-eabf-447e-abbb-a8c7d5627994\" (UID: \"21f9cef1-eabf-447e-abbb-a8c7d5627994\") " Feb 02 23:06:40 crc kubenswrapper[4755]: I0202 23:06:40.621957 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21f9cef1-eabf-447e-abbb-a8c7d5627994-kube-api-access-qwjx6" (OuterVolumeSpecName: "kube-api-access-qwjx6") pod "21f9cef1-eabf-447e-abbb-a8c7d5627994" (UID: "21f9cef1-eabf-447e-abbb-a8c7d5627994"). InnerVolumeSpecName "kube-api-access-qwjx6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:06:40 crc kubenswrapper[4755]: I0202 23:06:40.646065 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21f9cef1-eabf-447e-abbb-a8c7d5627994-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "21f9cef1-eabf-447e-abbb-a8c7d5627994" (UID: "21f9cef1-eabf-447e-abbb-a8c7d5627994"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:06:40 crc kubenswrapper[4755]: I0202 23:06:40.661906 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21f9cef1-eabf-447e-abbb-a8c7d5627994-inventory" (OuterVolumeSpecName: "inventory") pod "21f9cef1-eabf-447e-abbb-a8c7d5627994" (UID: "21f9cef1-eabf-447e-abbb-a8c7d5627994"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:06:40 crc kubenswrapper[4755]: I0202 23:06:40.723310 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/21f9cef1-eabf-447e-abbb-a8c7d5627994-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 23:06:40 crc kubenswrapper[4755]: I0202 23:06:40.723822 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21f9cef1-eabf-447e-abbb-a8c7d5627994-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 23:06:40 crc kubenswrapper[4755]: I0202 23:06:40.723966 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qwjx6\" (UniqueName: \"kubernetes.io/projected/21f9cef1-eabf-447e-abbb-a8c7d5627994-kube-api-access-qwjx6\") on node \"crc\" DevicePath \"\"" Feb 02 23:06:41 crc kubenswrapper[4755]: I0202 23:06:41.041006 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq" event={"ID":"21f9cef1-eabf-447e-abbb-a8c7d5627994","Type":"ContainerDied","Data":"15908fb494980253105a1432949795856bb9ceb9f56e626868f6fb4015773389"} Feb 02 23:06:41 crc kubenswrapper[4755]: I0202 23:06:41.041063 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15908fb494980253105a1432949795856bb9ceb9f56e626868f6fb4015773389" Feb 02 23:06:41 crc kubenswrapper[4755]: I0202 23:06:41.041114 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq" Feb 02 23:06:41 crc kubenswrapper[4755]: I0202 23:06:41.194306 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-l6kfd"] Feb 02 23:06:41 crc kubenswrapper[4755]: E0202 23:06:41.195252 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21f9cef1-eabf-447e-abbb-a8c7d5627994" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Feb 02 23:06:41 crc kubenswrapper[4755]: I0202 23:06:41.195269 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="21f9cef1-eabf-447e-abbb-a8c7d5627994" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Feb 02 23:06:41 crc kubenswrapper[4755]: I0202 23:06:41.195865 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="21f9cef1-eabf-447e-abbb-a8c7d5627994" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Feb 02 23:06:41 crc kubenswrapper[4755]: I0202 23:06:41.198208 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-l6kfd" Feb 02 23:06:41 crc kubenswrapper[4755]: I0202 23:06:41.201046 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 23:06:41 crc kubenswrapper[4755]: I0202 23:06:41.203329 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 23:06:41 crc kubenswrapper[4755]: I0202 23:06:41.203560 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 23:06:41 crc kubenswrapper[4755]: I0202 23:06:41.204313 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-n24hl" Feb 02 23:06:41 crc kubenswrapper[4755]: I0202 23:06:41.210707 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-l6kfd"] Feb 02 23:06:41 crc kubenswrapper[4755]: I0202 23:06:41.336231 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m826k\" (UniqueName: \"kubernetes.io/projected/42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0-kube-api-access-m826k\") pod \"ssh-known-hosts-edpm-deployment-l6kfd\" (UID: \"42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0\") " pod="openstack/ssh-known-hosts-edpm-deployment-l6kfd" Feb 02 23:06:41 crc kubenswrapper[4755]: I0202 23:06:41.336408 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-l6kfd\" (UID: \"42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0\") " pod="openstack/ssh-known-hosts-edpm-deployment-l6kfd" Feb 02 23:06:41 crc kubenswrapper[4755]: I0202 23:06:41.336489 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-l6kfd\" (UID: \"42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0\") " pod="openstack/ssh-known-hosts-edpm-deployment-l6kfd" Feb 02 23:06:41 crc kubenswrapper[4755]: I0202 23:06:41.439202 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-l6kfd\" (UID: \"42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0\") " pod="openstack/ssh-known-hosts-edpm-deployment-l6kfd" Feb 02 23:06:41 crc kubenswrapper[4755]: I0202 23:06:41.439327 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m826k\" (UniqueName: \"kubernetes.io/projected/42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0-kube-api-access-m826k\") pod \"ssh-known-hosts-edpm-deployment-l6kfd\" (UID: \"42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0\") " pod="openstack/ssh-known-hosts-edpm-deployment-l6kfd" Feb 02 23:06:41 crc kubenswrapper[4755]: I0202 23:06:41.439579 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-l6kfd\" (UID: \"42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0\") " pod="openstack/ssh-known-hosts-edpm-deployment-l6kfd" Feb 02 23:06:41 crc 
kubenswrapper[4755]: I0202 23:06:41.444567 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-l6kfd\" (UID: \"42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0\") " pod="openstack/ssh-known-hosts-edpm-deployment-l6kfd" Feb 02 23:06:41 crc kubenswrapper[4755]: I0202 23:06:41.445448 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-l6kfd\" (UID: \"42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0\") " pod="openstack/ssh-known-hosts-edpm-deployment-l6kfd" Feb 02 23:06:41 crc kubenswrapper[4755]: I0202 23:06:41.457333 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m826k\" (UniqueName: \"kubernetes.io/projected/42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0-kube-api-access-m826k\") pod \"ssh-known-hosts-edpm-deployment-l6kfd\" (UID: \"42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0\") " pod="openstack/ssh-known-hosts-edpm-deployment-l6kfd" Feb 02 23:06:41 crc kubenswrapper[4755]: I0202 23:06:41.524478 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-l6kfd" Feb 02 23:06:42 crc kubenswrapper[4755]: I0202 23:06:42.115174 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-l6kfd"] Feb 02 23:06:43 crc kubenswrapper[4755]: I0202 23:06:43.061513 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-l6kfd" event={"ID":"42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0","Type":"ContainerStarted","Data":"fd42c69a2081122355308d139a47b22a28cc41bb882e64a18445d4efdc70c302"} Feb 02 23:06:43 crc kubenswrapper[4755]: I0202 23:06:43.062012 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-l6kfd" event={"ID":"42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0","Type":"ContainerStarted","Data":"2b1abbd42724fa493b328f1ca0d715c7875ea8742ffe0f2f71a87b8f8e0d17bd"} Feb 02 23:06:43 crc kubenswrapper[4755]: I0202 23:06:43.085073 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-l6kfd" podStartSLOduration=1.543328973 podStartE2EDuration="2.085057243s" podCreationTimestamp="2026-02-02 23:06:41 +0000 UTC" firstStartedPulling="2026-02-02 23:06:42.113542396 +0000 UTC m=+1957.804762722" lastFinishedPulling="2026-02-02 23:06:42.655270626 +0000 UTC m=+1958.346490992" observedRunningTime="2026-02-02 23:06:43.079306161 +0000 UTC m=+1958.770526507" watchObservedRunningTime="2026-02-02 23:06:43.085057243 +0000 UTC m=+1958.776277569" Feb 02 23:06:50 crc kubenswrapper[4755]: I0202 23:06:50.063107 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-qbjlm"] Feb 02 23:06:50 crc kubenswrapper[4755]: I0202 23:06:50.075833 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-qbjlm"] Feb 02 23:06:50 crc kubenswrapper[4755]: I0202 23:06:50.156784 4755 generic.go:334] "Generic (PLEG): container finished" podID="42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0" containerID="fd42c69a2081122355308d139a47b22a28cc41bb882e64a18445d4efdc70c302" exitCode=0 Feb 02 23:06:50 crc kubenswrapper[4755]: I0202 23:06:50.156852 4755 kubelet.go:2453] "SyncLoop (PLEG): event for 
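The records above are journald lines from the kubenswrapper unit wrapping kubelet's klog output: a severity letter (I/W/E) fused to an MMDD date, a microsecond timestamp, a thread id, the emitting source file:line, and the message. A minimal parsing sketch (the group names such as `unit_pid` and `src` are our own labels, not anything kubelet defines); the later sketches in this log assume its output:

```python
import re

# Journald + klog line, e.g.:
#   Feb 02 23:06:41 crc kubenswrapper[4755]: I0202 23:06:41.194306 4755 kubelet.go:2421] "SyncLoop ADD" ...
# Group names are our own labels for the klog fields "Lmmdd hh:mm:ss.uuuuuu threadid file:line] msg".
KLOG_RE = re.compile(
    r'^(?P<wall>\w{3} \d{2} \d{2}:\d{2}:\d{2}) (?P<host>\S+) kubenswrapper\[(?P<unit_pid>\d+)\]: '
    r'(?P<level>[IWE])(?P<mmdd>\d{4}) (?P<ts>\d{2}:\d{2}:\d{2}\.\d{6})\s+(?P<tid>\d+) '
    r'(?P<src>[\w./]+:\d+)\] (?P<msg>.*)$'
)

def parse(line):
    """Return a dict of klog fields, or None for continuation/garbage lines."""
    m = KLOG_RE.match(line)
    return m.groupdict() if m else None

rec = parse('Feb 02 23:06:41 crc kubenswrapper[4755]: I0202 23:06:41.194306 4755 '
            'kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-l6kfd"]')
assert rec["level"] == "I" and rec["src"] == "kubelet.go:2421"
```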
pod" pod="openstack/ssh-known-hosts-edpm-deployment-l6kfd" event={"ID":"42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0","Type":"ContainerDied","Data":"fd42c69a2081122355308d139a47b22a28cc41bb882e64a18445d4efdc70c302"} Feb 02 23:06:51 crc kubenswrapper[4755]: I0202 23:06:51.083617 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35a5dd6b-95f6-43c8-b929-bd370640aa10" path="/var/lib/kubelet/pods/35a5dd6b-95f6-43c8-b929-bd370640aa10/volumes" Feb 02 23:06:51 crc kubenswrapper[4755]: I0202 23:06:51.668376 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-l6kfd" Feb 02 23:06:51 crc kubenswrapper[4755]: I0202 23:06:51.810037 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0-inventory-0\") pod \"42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0\" (UID: \"42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0\") " Feb 02 23:06:51 crc kubenswrapper[4755]: I0202 23:06:51.810119 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0-ssh-key-openstack-edpm-ipam\") pod \"42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0\" (UID: \"42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0\") " Feb 02 23:06:51 crc kubenswrapper[4755]: I0202 23:06:51.810381 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m826k\" (UniqueName: \"kubernetes.io/projected/42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0-kube-api-access-m826k\") pod \"42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0\" (UID: \"42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0\") " Feb 02 23:06:51 crc kubenswrapper[4755]: I0202 23:06:51.818867 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0-kube-api-access-m826k" (OuterVolumeSpecName: "kube-api-access-m826k") pod "42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0" (UID: "42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0"). InnerVolumeSpecName "kube-api-access-m826k". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:06:51 crc kubenswrapper[4755]: I0202 23:06:51.840142 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0" (UID: "42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:06:51 crc kubenswrapper[4755]: I0202 23:06:51.870057 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0" (UID: "42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:06:51 crc kubenswrapper[4755]: I0202 23:06:51.913917 4755 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0-inventory-0\") on node \"crc\" DevicePath \"\"" Feb 02 23:06:51 crc kubenswrapper[4755]: I0202 23:06:51.913991 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 23:06:51 crc kubenswrapper[4755]: I0202 23:06:51.914017 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m826k\" (UniqueName: \"kubernetes.io/projected/42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0-kube-api-access-m826k\") on node \"crc\" DevicePath \"\"" Feb 02 23:06:52 crc kubenswrapper[4755]: I0202 23:06:52.181936 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-l6kfd" event={"ID":"42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0","Type":"ContainerDied","Data":"2b1abbd42724fa493b328f1ca0d715c7875ea8742ffe0f2f71a87b8f8e0d17bd"} Feb 02 23:06:52 crc kubenswrapper[4755]: I0202 23:06:52.181992 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2b1abbd42724fa493b328f1ca0d715c7875ea8742ffe0f2f71a87b8f8e0d17bd" Feb 02 23:06:52 crc kubenswrapper[4755]: I0202 23:06:52.182912 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-l6kfd" Feb 02 23:06:52 crc kubenswrapper[4755]: I0202 23:06:52.282001 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-vvwd4"] Feb 02 23:06:52 crc kubenswrapper[4755]: E0202 23:06:52.282904 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0" containerName="ssh-known-hosts-edpm-deployment" Feb 02 23:06:52 crc kubenswrapper[4755]: I0202 23:06:52.283044 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0" containerName="ssh-known-hosts-edpm-deployment" Feb 02 23:06:52 crc kubenswrapper[4755]: I0202 23:06:52.283567 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0" containerName="ssh-known-hosts-edpm-deployment" Feb 02 23:06:52 crc kubenswrapper[4755]: I0202 23:06:52.284974 4755 util.go:30] "No sandbox for pod can be found. 
Feb 02 23:06:52 crc kubenswrapper[4755]: I0202 23:06:52.288500 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-n24hl"
Feb 02 23:06:52 crc kubenswrapper[4755]: I0202 23:06:52.288870 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Feb 02 23:06:52 crc kubenswrapper[4755]: I0202 23:06:52.290580 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Feb 02 23:06:52 crc kubenswrapper[4755]: I0202 23:06:52.299644 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Feb 02 23:06:52 crc kubenswrapper[4755]: I0202 23:06:52.307930 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-vvwd4"]
Feb 02 23:06:52 crc kubenswrapper[4755]: I0202 23:06:52.428388 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1c1128ff-2549-455b-bee1-186751b6966b-ssh-key-openstack-edpm-ipam\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vvwd4\" (UID: \"1c1128ff-2549-455b-bee1-186751b6966b\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vvwd4"
Feb 02 23:06:52 crc kubenswrapper[4755]: I0202 23:06:52.428799 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2l8v\" (UniqueName: \"kubernetes.io/projected/1c1128ff-2549-455b-bee1-186751b6966b-kube-api-access-c2l8v\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vvwd4\" (UID: \"1c1128ff-2549-455b-bee1-186751b6966b\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vvwd4"
Feb 02 23:06:52 crc kubenswrapper[4755]: I0202 23:06:52.429198 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1c1128ff-2549-455b-bee1-186751b6966b-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vvwd4\" (UID: \"1c1128ff-2549-455b-bee1-186751b6966b\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vvwd4"
Feb 02 23:06:52 crc kubenswrapper[4755]: I0202 23:06:52.531586 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1c1128ff-2549-455b-bee1-186751b6966b-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vvwd4\" (UID: \"1c1128ff-2549-455b-bee1-186751b6966b\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vvwd4"
Feb 02 23:06:52 crc kubenswrapper[4755]: I0202 23:06:52.532140 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1c1128ff-2549-455b-bee1-186751b6966b-ssh-key-openstack-edpm-ipam\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vvwd4\" (UID: \"1c1128ff-2549-455b-bee1-186751b6966b\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vvwd4"
Feb 02 23:06:52 crc kubenswrapper[4755]: I0202 23:06:52.533147 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2l8v\" (UniqueName: \"kubernetes.io/projected/1c1128ff-2549-455b-bee1-186751b6966b-kube-api-access-c2l8v\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vvwd4\" (UID: \"1c1128ff-2549-455b-bee1-186751b6966b\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vvwd4"
Feb 02 23:06:52 crc kubenswrapper[4755]: I0202 23:06:52.540229 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1c1128ff-2549-455b-bee1-186751b6966b-ssh-key-openstack-edpm-ipam\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vvwd4\" (UID: \"1c1128ff-2549-455b-bee1-186751b6966b\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vvwd4"
Feb 02 23:06:52 crc kubenswrapper[4755]: I0202 23:06:52.558675 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1c1128ff-2549-455b-bee1-186751b6966b-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vvwd4\" (UID: \"1c1128ff-2549-455b-bee1-186751b6966b\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vvwd4"
Feb 02 23:06:52 crc kubenswrapper[4755]: I0202 23:06:52.565156 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2l8v\" (UniqueName: \"kubernetes.io/projected/1c1128ff-2549-455b-bee1-186751b6966b-kube-api-access-c2l8v\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-vvwd4\" (UID: \"1c1128ff-2549-455b-bee1-186751b6966b\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vvwd4"
Feb 02 23:06:52 crc kubenswrapper[4755]: I0202 23:06:52.611069 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vvwd4"
Feb 02 23:06:53 crc kubenswrapper[4755]: I0202 23:06:53.331064 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-vvwd4"]
Feb 02 23:06:53 crc kubenswrapper[4755]: W0202 23:06:53.339745 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1c1128ff_2549_455b_bee1_186751b6966b.slice/crio-f66acd4241bb14716f8aa5c3b82761732a2c2b5e891e88fe85cb48029d00724b WatchSource:0}: Error finding container f66acd4241bb14716f8aa5c3b82761732a2c2b5e891e88fe85cb48029d00724b: Status 404 returned error can't find the container with id f66acd4241bb14716f8aa5c3b82761732a2c2b5e891e88fe85cb48029d00724b
Feb 02 23:06:54 crc kubenswrapper[4755]: I0202 23:06:54.209996 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vvwd4" event={"ID":"1c1128ff-2549-455b-bee1-186751b6966b","Type":"ContainerStarted","Data":"cab48116c5e5de505a443f2be47bbfdbf7f70e86ac425e598294b5502ec79a65"}
Feb 02 23:06:54 crc kubenswrapper[4755]: I0202 23:06:54.210446 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vvwd4" event={"ID":"1c1128ff-2549-455b-bee1-186751b6966b","Type":"ContainerStarted","Data":"f66acd4241bb14716f8aa5c3b82761732a2c2b5e891e88fe85cb48029d00724b"}
Feb 02 23:06:54 crc kubenswrapper[4755]: I0202 23:06:54.240437 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vvwd4" podStartSLOduration=1.68703674 podStartE2EDuration="2.240404907s" podCreationTimestamp="2026-02-02 23:06:52 +0000 UTC" firstStartedPulling="2026-02-02 23:06:53.34294866 +0000 UTC m=+1969.034168996" lastFinishedPulling="2026-02-02 23:06:53.896316797 +0000 UTC m=+1969.587537163" observedRunningTime="2026-02-02 23:06:54.228356249 +0000 UTC m=+1969.919576635" watchObservedRunningTime="2026-02-02 23:06:54.240404907 +0000 UTC m=+1969.931625263"
Feb 02 23:07:03 crc kubenswrapper[4755]: I0202 23:07:03.344251 4755 generic.go:334] "Generic (PLEG): container finished" podID="1c1128ff-2549-455b-bee1-186751b6966b" containerID="cab48116c5e5de505a443f2be47bbfdbf7f70e86ac425e598294b5502ec79a65" exitCode=0
Feb 02 23:07:03 crc kubenswrapper[4755]: I0202 23:07:03.344354 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vvwd4" event={"ID":"1c1128ff-2549-455b-bee1-186751b6966b","Type":"ContainerDied","Data":"cab48116c5e5de505a443f2be47bbfdbf7f70e86ac425e598294b5502ec79a65"}
Feb 02 23:07:04 crc kubenswrapper[4755]: I0202 23:07:04.899205 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vvwd4"
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.061075 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2l8v\" (UniqueName: \"kubernetes.io/projected/1c1128ff-2549-455b-bee1-186751b6966b-kube-api-access-c2l8v\") pod \"1c1128ff-2549-455b-bee1-186751b6966b\" (UID: \"1c1128ff-2549-455b-bee1-186751b6966b\") "
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.061450 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1c1128ff-2549-455b-bee1-186751b6966b-inventory\") pod \"1c1128ff-2549-455b-bee1-186751b6966b\" (UID: \"1c1128ff-2549-455b-bee1-186751b6966b\") "
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.061499 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1c1128ff-2549-455b-bee1-186751b6966b-ssh-key-openstack-edpm-ipam\") pod \"1c1128ff-2549-455b-bee1-186751b6966b\" (UID: \"1c1128ff-2549-455b-bee1-186751b6966b\") "
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.073274 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c1128ff-2549-455b-bee1-186751b6966b-kube-api-access-c2l8v" (OuterVolumeSpecName: "kube-api-access-c2l8v") pod "1c1128ff-2549-455b-bee1-186751b6966b" (UID: "1c1128ff-2549-455b-bee1-186751b6966b"). InnerVolumeSpecName "kube-api-access-c2l8v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.096029 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c1128ff-2549-455b-bee1-186751b6966b-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "1c1128ff-2549-455b-bee1-186751b6966b" (UID: "1c1128ff-2549-455b-bee1-186751b6966b"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.106854 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c1128ff-2549-455b-bee1-186751b6966b-inventory" (OuterVolumeSpecName: "inventory") pod "1c1128ff-2549-455b-bee1-186751b6966b" (UID: "1c1128ff-2549-455b-bee1-186751b6966b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.164463 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1c1128ff-2549-455b-bee1-186751b6966b-inventory\") on node \"crc\" DevicePath \"\""
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.164529 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1c1128ff-2549-455b-bee1-186751b6966b-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.164561 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2l8v\" (UniqueName: \"kubernetes.io/projected/1c1128ff-2549-455b-bee1-186751b6966b-kube-api-access-c2l8v\") on node \"crc\" DevicePath \"\""
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.372359 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vvwd4" event={"ID":"1c1128ff-2549-455b-bee1-186751b6966b","Type":"ContainerDied","Data":"f66acd4241bb14716f8aa5c3b82761732a2c2b5e891e88fe85cb48029d00724b"}
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.372432 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f66acd4241bb14716f8aa5c3b82761732a2c2b5e891e88fe85cb48029d00724b"
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.372543 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-vvwd4"
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.503947 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7"]
Feb 02 23:07:05 crc kubenswrapper[4755]: E0202 23:07:05.504636 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c1128ff-2549-455b-bee1-186751b6966b" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.504672 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c1128ff-2549-455b-bee1-186751b6966b" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.505169 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c1128ff-2549-455b-bee1-186751b6966b" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.506462 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7"
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.514756 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-n24hl"
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.514797 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.514766 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.523267 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.538717 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7"]
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.678133 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f7fc429c-671b-4085-ab07-8f4b2c53f496-ssh-key-openstack-edpm-ipam\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7\" (UID: \"f7fc429c-671b-4085-ab07-8f4b2c53f496\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7"
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.678234 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f7fc429c-671b-4085-ab07-8f4b2c53f496-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7\" (UID: \"f7fc429c-671b-4085-ab07-8f4b2c53f496\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7"
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.678272 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7wpk\" (UniqueName: \"kubernetes.io/projected/f7fc429c-671b-4085-ab07-8f4b2c53f496-kube-api-access-w7wpk\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7\" (UID: \"f7fc429c-671b-4085-ab07-8f4b2c53f496\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7"
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.780189 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f7fc429c-671b-4085-ab07-8f4b2c53f496-ssh-key-openstack-edpm-ipam\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7\" (UID: \"f7fc429c-671b-4085-ab07-8f4b2c53f496\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7"
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.780375 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f7fc429c-671b-4085-ab07-8f4b2c53f496-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7\" (UID: \"f7fc429c-671b-4085-ab07-8f4b2c53f496\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7"
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.780441 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7wpk\" (UniqueName: \"kubernetes.io/projected/f7fc429c-671b-4085-ab07-8f4b2c53f496-kube-api-access-w7wpk\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7\" (UID: \"f7fc429c-671b-4085-ab07-8f4b2c53f496\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7"
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.784991 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f7fc429c-671b-4085-ab07-8f4b2c53f496-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7\" (UID: \"f7fc429c-671b-4085-ab07-8f4b2c53f496\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7"
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.786173 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f7fc429c-671b-4085-ab07-8f4b2c53f496-ssh-key-openstack-edpm-ipam\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7\" (UID: \"f7fc429c-671b-4085-ab07-8f4b2c53f496\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7"
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.797238 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7wpk\" (UniqueName: \"kubernetes.io/projected/f7fc429c-671b-4085-ab07-8f4b2c53f496-kube-api-access-w7wpk\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7\" (UID: \"f7fc429c-671b-4085-ab07-8f4b2c53f496\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7"
Feb 02 23:07:05 crc kubenswrapper[4755]: I0202 23:07:05.854398 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7"
Feb 02 23:07:06 crc kubenswrapper[4755]: I0202 23:07:06.418860 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7"]
Feb 02 23:07:06 crc kubenswrapper[4755]: I0202 23:07:06.423064 4755 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Feb 02 23:07:07 crc kubenswrapper[4755]: I0202 23:07:07.397427 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7" event={"ID":"f7fc429c-671b-4085-ab07-8f4b2c53f496","Type":"ContainerStarted","Data":"e2d3c5dadfd24384322d05d0b5647bd1811a33a3c811a5a5580c6e4899d9d2a9"}
Feb 02 23:07:07 crc kubenswrapper[4755]: I0202 23:07:07.397972 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7" event={"ID":"f7fc429c-671b-4085-ab07-8f4b2c53f496","Type":"ContainerStarted","Data":"255d439a32fbefbcb406936cddf5a0d35b9951f6d34ccb4cd9be9f686bf9629e"}
Feb 02 23:07:07 crc kubenswrapper[4755]: I0202 23:07:07.447862 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7" podStartSLOduration=2.006961029 podStartE2EDuration="2.447836018s" podCreationTimestamp="2026-02-02 23:07:05 +0000 UTC" firstStartedPulling="2026-02-02 23:07:06.42287185 +0000 UTC m=+1982.114092176" lastFinishedPulling="2026-02-02 23:07:06.863746839 +0000 UTC m=+1982.554967165" observedRunningTime="2026-02-02 23:07:07.42012323 +0000 UTC m=+1983.111343626" watchObservedRunningTime="2026-02-02 23:07:07.447836018 +0000 UTC m=+1983.139056374"
Feb 02 23:07:11 crc kubenswrapper[4755]: I0202 23:07:11.425093 4755 scope.go:117] "RemoveContainer" containerID="ad5a820a59f4260604fdb19aaaf313ac4f4580866dc5fc6ca733d716e78c3e84"
Feb 02 23:07:16 crc kubenswrapper[4755]: E0202 23:07:16.917968 4755 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf7fc429c_671b_4085_ab07_8f4b2c53f496.slice/crio-e2d3c5dadfd24384322d05d0b5647bd1811a33a3c811a5a5580c6e4899d9d2a9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf7fc429c_671b_4085_ab07_8f4b2c53f496.slice/crio-conmon-e2d3c5dadfd24384322d05d0b5647bd1811a33a3c811a5a5580c6e4899d9d2a9.scope\": RecentStats: unable to find data in memory cache]"
Feb 02 23:07:17 crc kubenswrapper[4755]: I0202 23:07:17.539606 4755 generic.go:334] "Generic (PLEG): container finished" podID="f7fc429c-671b-4085-ab07-8f4b2c53f496" containerID="e2d3c5dadfd24384322d05d0b5647bd1811a33a3c811a5a5580c6e4899d9d2a9" exitCode=0
Feb 02 23:07:17 crc kubenswrapper[4755]: I0202 23:07:17.539704 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7" event={"ID":"f7fc429c-671b-4085-ab07-8f4b2c53f496","Type":"ContainerDied","Data":"e2d3c5dadfd24384322d05d0b5647bd1811a33a3c811a5a5580c6e4899d9d2a9"}
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.114211 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.219335 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7wpk\" (UniqueName: \"kubernetes.io/projected/f7fc429c-671b-4085-ab07-8f4b2c53f496-kube-api-access-w7wpk\") pod \"f7fc429c-671b-4085-ab07-8f4b2c53f496\" (UID: \"f7fc429c-671b-4085-ab07-8f4b2c53f496\") "
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.219419 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f7fc429c-671b-4085-ab07-8f4b2c53f496-inventory\") pod \"f7fc429c-671b-4085-ab07-8f4b2c53f496\" (UID: \"f7fc429c-671b-4085-ab07-8f4b2c53f496\") "
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.219552 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f7fc429c-671b-4085-ab07-8f4b2c53f496-ssh-key-openstack-edpm-ipam\") pod \"f7fc429c-671b-4085-ab07-8f4b2c53f496\" (UID: \"f7fc429c-671b-4085-ab07-8f4b2c53f496\") "
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.225923 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7fc429c-671b-4085-ab07-8f4b2c53f496-kube-api-access-w7wpk" (OuterVolumeSpecName: "kube-api-access-w7wpk") pod "f7fc429c-671b-4085-ab07-8f4b2c53f496" (UID: "f7fc429c-671b-4085-ab07-8f4b2c53f496"). InnerVolumeSpecName "kube-api-access-w7wpk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.264347 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7fc429c-671b-4085-ab07-8f4b2c53f496-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "f7fc429c-671b-4085-ab07-8f4b2c53f496" (UID: "f7fc429c-671b-4085-ab07-8f4b2c53f496"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.269019 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7fc429c-671b-4085-ab07-8f4b2c53f496-inventory" (OuterVolumeSpecName: "inventory") pod "f7fc429c-671b-4085-ab07-8f4b2c53f496" (UID: "f7fc429c-671b-4085-ab07-8f4b2c53f496"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.322321 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f7fc429c-671b-4085-ab07-8f4b2c53f496-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.322374 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7wpk\" (UniqueName: \"kubernetes.io/projected/f7fc429c-671b-4085-ab07-8f4b2c53f496-kube-api-access-w7wpk\") on node \"crc\" DevicePath \"\""
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.322402 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f7fc429c-671b-4085-ab07-8f4b2c53f496-inventory\") on node \"crc\" DevicePath \"\""
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.568418 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7" event={"ID":"f7fc429c-671b-4085-ab07-8f4b2c53f496","Type":"ContainerDied","Data":"255d439a32fbefbcb406936cddf5a0d35b9951f6d34ccb4cd9be9f686bf9629e"}
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.568482 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="255d439a32fbefbcb406936cddf5a0d35b9951f6d34ccb4cd9be9f686bf9629e"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.568505 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.695772 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq"]
Feb 02 23:07:19 crc kubenswrapper[4755]: E0202 23:07:19.696401 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7fc429c-671b-4085-ab07-8f4b2c53f496" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.696430 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7fc429c-671b-4085-ab07-8f4b2c53f496" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.697747 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7fc429c-671b-4085-ab07-8f4b2c53f496" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.699541 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq"
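Taken together, the "SyncLoop ADD" events in this window trace the EDPM deployment chain on this node: ssh-known-hosts → run-os → reboot-os → install-certs (configure-os finishes just before the first of these), each step admitted only after the previous pod's container state has been pruned. Extracting that order from parsed records takes a few lines (helper name ours):

```python
def admitted_pods(records, namespace="openstack/"):
    """Yield pod names in kubelet admission order, from "SyncLoop ADD" events."""
    for rec in records:
        if rec and '"SyncLoop ADD"' in rec["msg"]:
            # msg looks like: "SyncLoop ADD" source="api" pods=["ns/name"]
            name = rec["msg"].split('pods=["', 1)[1].split('"', 1)[0]
            if name.startswith(namespace):
                yield name
```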
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.708400 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.708721 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.708871 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.708901 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.709305 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-n24hl"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.709537 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.709804 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.710155 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.715076 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq"]
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.847387 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwfcc\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-kube-api-access-hwfcc\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.847994 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.848404 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.848516 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.848626 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-ssh-key-openstack-edpm-ipam\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.848746 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.849011 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.849131 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.849252 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.849426 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.849572 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.849706 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.849848 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.849995 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.952437 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.952506 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.952543 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq"
Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.952618 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwfcc\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-kube-api-access-hwfcc\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq"
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.952652 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.952696 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.952716 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.952757 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-ssh-key-openstack-edpm-ipam\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.952784 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.952820 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.952842 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.952859 4755 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.952886 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.952912 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.959650 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.960058 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.960932 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.961049 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.961050 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.961105 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-ssh-key-openstack-edpm-ipam\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.961104 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.961260 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.963051 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.963218 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.963316 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.966316 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.970937 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:19 crc kubenswrapper[4755]: I0202 23:07:19.983440 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwfcc\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-kube-api-access-hwfcc\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:20 crc kubenswrapper[4755]: I0202 23:07:20.068455 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:07:20 crc kubenswrapper[4755]: I0202 23:07:20.687156 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq"] Feb 02 23:07:21 crc kubenswrapper[4755]: I0202 23:07:21.591519 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" event={"ID":"50b91f5b-54be-49cb-bfff-e2d317db916e","Type":"ContainerStarted","Data":"99c0324a039247a67040b166cc8b7feaac0855d20a39390f52dd12a0cb411b66"} Feb 02 23:07:21 crc kubenswrapper[4755]: I0202 23:07:21.591872 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" event={"ID":"50b91f5b-54be-49cb-bfff-e2d317db916e","Type":"ContainerStarted","Data":"68f945dde397d075ba06bcd0ff8ff3288d9884ca7a6fb767ffd4153f4ee93a37"} Feb 02 23:07:21 crc kubenswrapper[4755]: I0202 23:07:21.628811 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" podStartSLOduration=2.163714345 podStartE2EDuration="2.628790722s" podCreationTimestamp="2026-02-02 23:07:19 +0000 UTC" firstStartedPulling="2026-02-02 23:07:20.708071982 +0000 UTC m=+1996.399292318" lastFinishedPulling="2026-02-02 23:07:21.173148359 +0000 UTC m=+1996.864368695" observedRunningTime="2026-02-02 23:07:21.616845297 +0000 UTC m=+1997.308065683" watchObservedRunningTime="2026-02-02 23:07:21.628790722 +0000 UTC m=+1997.320011048" Feb 02 23:07:36 crc kubenswrapper[4755]: I0202 23:07:36.082293 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-db-sync-ltkhr"] Feb 02 23:07:36 crc kubenswrapper[4755]: I0202 23:07:36.095252 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-db-sync-ltkhr"] Feb 02 23:07:37 crc kubenswrapper[4755]: I0202 23:07:37.088238 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="909d96ec-6e4a-4012-b7bf-bb0f95815443" path="/var/lib/kubelet/pods/909d96ec-6e4a-4012-b7bf-bb0f95815443/volumes" Feb 02 23:07:41 crc kubenswrapper[4755]: I0202 23:07:41.030681 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-storageinit-6nq66"] Feb 02 23:07:41 crc kubenswrapper[4755]: I0202 23:07:41.040756 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-storageinit-6nq66"] Feb 02 23:07:41 crc kubenswrapper[4755]: I0202 23:07:41.082483 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="dadba444-a0a5-4b6f-8d3b-9c2ed25f1261" path="/var/lib/kubelet/pods/dadba444-a0a5-4b6f-8d3b-9c2ed25f1261/volumes" Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.290941 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lzsnv"] Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.293927 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lzsnv" Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.300277 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lzsnv"] Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.361083 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84nq2\" (UniqueName: \"kubernetes.io/projected/28fbc970-ee8c-43bc-b5ea-5e6e7a86a855-kube-api-access-84nq2\") pod \"redhat-marketplace-lzsnv\" (UID: \"28fbc970-ee8c-43bc-b5ea-5e6e7a86a855\") " pod="openshift-marketplace/redhat-marketplace-lzsnv" Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.361202 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28fbc970-ee8c-43bc-b5ea-5e6e7a86a855-utilities\") pod \"redhat-marketplace-lzsnv\" (UID: \"28fbc970-ee8c-43bc-b5ea-5e6e7a86a855\") " pod="openshift-marketplace/redhat-marketplace-lzsnv" Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.361349 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28fbc970-ee8c-43bc-b5ea-5e6e7a86a855-catalog-content\") pod \"redhat-marketplace-lzsnv\" (UID: \"28fbc970-ee8c-43bc-b5ea-5e6e7a86a855\") " pod="openshift-marketplace/redhat-marketplace-lzsnv" Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.463727 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84nq2\" (UniqueName: \"kubernetes.io/projected/28fbc970-ee8c-43bc-b5ea-5e6e7a86a855-kube-api-access-84nq2\") pod \"redhat-marketplace-lzsnv\" (UID: \"28fbc970-ee8c-43bc-b5ea-5e6e7a86a855\") " pod="openshift-marketplace/redhat-marketplace-lzsnv" Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.463830 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28fbc970-ee8c-43bc-b5ea-5e6e7a86a855-utilities\") pod \"redhat-marketplace-lzsnv\" (UID: \"28fbc970-ee8c-43bc-b5ea-5e6e7a86a855\") " pod="openshift-marketplace/redhat-marketplace-lzsnv" Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.463986 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28fbc970-ee8c-43bc-b5ea-5e6e7a86a855-catalog-content\") pod \"redhat-marketplace-lzsnv\" (UID: \"28fbc970-ee8c-43bc-b5ea-5e6e7a86a855\") " pod="openshift-marketplace/redhat-marketplace-lzsnv" Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.464394 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28fbc970-ee8c-43bc-b5ea-5e6e7a86a855-utilities\") pod \"redhat-marketplace-lzsnv\" (UID: \"28fbc970-ee8c-43bc-b5ea-5e6e7a86a855\") " pod="openshift-marketplace/redhat-marketplace-lzsnv" Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.464478 4755 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28fbc970-ee8c-43bc-b5ea-5e6e7a86a855-catalog-content\") pod \"redhat-marketplace-lzsnv\" (UID: \"28fbc970-ee8c-43bc-b5ea-5e6e7a86a855\") " pod="openshift-marketplace/redhat-marketplace-lzsnv" Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.483417 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84nq2\" (UniqueName: \"kubernetes.io/projected/28fbc970-ee8c-43bc-b5ea-5e6e7a86a855-kube-api-access-84nq2\") pod \"redhat-marketplace-lzsnv\" (UID: \"28fbc970-ee8c-43bc-b5ea-5e6e7a86a855\") " pod="openshift-marketplace/redhat-marketplace-lzsnv" Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.620546 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lzsnv" Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.825833 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vrt7r"] Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.829018 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vrt7r" Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.845530 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vrt7r"] Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.871084 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98021307-b099-4546-9be4-4c9910160dee-utilities\") pod \"redhat-operators-vrt7r\" (UID: \"98021307-b099-4546-9be4-4c9910160dee\") " pod="openshift-marketplace/redhat-operators-vrt7r" Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.871125 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98021307-b099-4546-9be4-4c9910160dee-catalog-content\") pod \"redhat-operators-vrt7r\" (UID: \"98021307-b099-4546-9be4-4c9910160dee\") " pod="openshift-marketplace/redhat-operators-vrt7r" Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.871146 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4jr5\" (UniqueName: \"kubernetes.io/projected/98021307-b099-4546-9be4-4c9910160dee-kube-api-access-t4jr5\") pod \"redhat-operators-vrt7r\" (UID: \"98021307-b099-4546-9be4-4c9910160dee\") " pod="openshift-marketplace/redhat-operators-vrt7r" Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.973121 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98021307-b099-4546-9be4-4c9910160dee-utilities\") pod \"redhat-operators-vrt7r\" (UID: \"98021307-b099-4546-9be4-4c9910160dee\") " pod="openshift-marketplace/redhat-operators-vrt7r" Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.973477 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98021307-b099-4546-9be4-4c9910160dee-catalog-content\") pod \"redhat-operators-vrt7r\" (UID: \"98021307-b099-4546-9be4-4c9910160dee\") " pod="openshift-marketplace/redhat-operators-vrt7r" Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.973513 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-t4jr5\" (UniqueName: \"kubernetes.io/projected/98021307-b099-4546-9be4-4c9910160dee-kube-api-access-t4jr5\") pod \"redhat-operators-vrt7r\" (UID: \"98021307-b099-4546-9be4-4c9910160dee\") " pod="openshift-marketplace/redhat-operators-vrt7r" Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.973675 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98021307-b099-4546-9be4-4c9910160dee-utilities\") pod \"redhat-operators-vrt7r\" (UID: \"98021307-b099-4546-9be4-4c9910160dee\") " pod="openshift-marketplace/redhat-operators-vrt7r" Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.974126 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98021307-b099-4546-9be4-4c9910160dee-catalog-content\") pod \"redhat-operators-vrt7r\" (UID: \"98021307-b099-4546-9be4-4c9910160dee\") " pod="openshift-marketplace/redhat-operators-vrt7r" Feb 02 23:07:42 crc kubenswrapper[4755]: I0202 23:07:42.992265 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4jr5\" (UniqueName: \"kubernetes.io/projected/98021307-b099-4546-9be4-4c9910160dee-kube-api-access-t4jr5\") pod \"redhat-operators-vrt7r\" (UID: \"98021307-b099-4546-9be4-4c9910160dee\") " pod="openshift-marketplace/redhat-operators-vrt7r" Feb 02 23:07:43 crc kubenswrapper[4755]: I0202 23:07:43.084391 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lzsnv"] Feb 02 23:07:43 crc kubenswrapper[4755]: I0202 23:07:43.164727 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vrt7r" Feb 02 23:07:43 crc kubenswrapper[4755]: I0202 23:07:43.337664 4755 generic.go:334] "Generic (PLEG): container finished" podID="28fbc970-ee8c-43bc-b5ea-5e6e7a86a855" containerID="e98488457f2d82bb9073a4f305c8dc4bddf55b6a7d758ba75ac5770fe0231f6a" exitCode=0 Feb 02 23:07:43 crc kubenswrapper[4755]: I0202 23:07:43.337865 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lzsnv" event={"ID":"28fbc970-ee8c-43bc-b5ea-5e6e7a86a855","Type":"ContainerDied","Data":"e98488457f2d82bb9073a4f305c8dc4bddf55b6a7d758ba75ac5770fe0231f6a"} Feb 02 23:07:43 crc kubenswrapper[4755]: I0202 23:07:43.338542 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lzsnv" event={"ID":"28fbc970-ee8c-43bc-b5ea-5e6e7a86a855","Type":"ContainerStarted","Data":"df14e30650b4d7b2342d034fe2a14fbfb46b06341c6c08b2f3c9fd8d2784f714"} Feb 02 23:07:43 crc kubenswrapper[4755]: I0202 23:07:43.711396 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vrt7r"] Feb 02 23:07:44 crc kubenswrapper[4755]: I0202 23:07:44.348513 4755 generic.go:334] "Generic (PLEG): container finished" podID="98021307-b099-4546-9be4-4c9910160dee" containerID="f730505a6dbedea19e24141fb4041c090a9598a799328a0a9dea86c3e2385b3c" exitCode=0 Feb 02 23:07:44 crc kubenswrapper[4755]: I0202 23:07:44.348614 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vrt7r" event={"ID":"98021307-b099-4546-9be4-4c9910160dee","Type":"ContainerDied","Data":"f730505a6dbedea19e24141fb4041c090a9598a799328a0a9dea86c3e2385b3c"} Feb 02 23:07:44 crc kubenswrapper[4755]: I0202 23:07:44.349221 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-vrt7r" event={"ID":"98021307-b099-4546-9be4-4c9910160dee","Type":"ContainerStarted","Data":"db8ea09a30c76b7acf9eca28f0ac5bb37a437b43f0821a77914672b98939944e"} Feb 02 23:07:44 crc kubenswrapper[4755]: I0202 23:07:44.351690 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lzsnv" event={"ID":"28fbc970-ee8c-43bc-b5ea-5e6e7a86a855","Type":"ContainerStarted","Data":"79e5da38bf8d08c56ed10c2e9c1e9a02a85f14a2152db0fcf2075e1beb815d77"} Feb 02 23:07:45 crc kubenswrapper[4755]: I0202 23:07:45.362801 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vrt7r" event={"ID":"98021307-b099-4546-9be4-4c9910160dee","Type":"ContainerStarted","Data":"c648f3ddce1439cf96c0cf86777425b73ba9b6dbcc2cfaccf3bcecacd0a979c3"} Feb 02 23:07:45 crc kubenswrapper[4755]: I0202 23:07:45.371125 4755 generic.go:334] "Generic (PLEG): container finished" podID="28fbc970-ee8c-43bc-b5ea-5e6e7a86a855" containerID="79e5da38bf8d08c56ed10c2e9c1e9a02a85f14a2152db0fcf2075e1beb815d77" exitCode=0 Feb 02 23:07:45 crc kubenswrapper[4755]: I0202 23:07:45.371210 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lzsnv" event={"ID":"28fbc970-ee8c-43bc-b5ea-5e6e7a86a855","Type":"ContainerDied","Data":"79e5da38bf8d08c56ed10c2e9c1e9a02a85f14a2152db0fcf2075e1beb815d77"} Feb 02 23:07:46 crc kubenswrapper[4755]: I0202 23:07:46.384687 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lzsnv" event={"ID":"28fbc970-ee8c-43bc-b5ea-5e6e7a86a855","Type":"ContainerStarted","Data":"cd706975731ac48445b678eae864382b49c1ea268aa534484eac2a042d8a9535"} Feb 02 23:07:46 crc kubenswrapper[4755]: I0202 23:07:46.426946 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lzsnv" podStartSLOduration=1.988596667 podStartE2EDuration="4.42692838s" podCreationTimestamp="2026-02-02 23:07:42 +0000 UTC" firstStartedPulling="2026-02-02 23:07:43.341235961 +0000 UTC m=+2019.032456287" lastFinishedPulling="2026-02-02 23:07:45.779567674 +0000 UTC m=+2021.470788000" observedRunningTime="2026-02-02 23:07:46.422047153 +0000 UTC m=+2022.113267469" watchObservedRunningTime="2026-02-02 23:07:46.42692838 +0000 UTC m=+2022.118148706" Feb 02 23:07:50 crc kubenswrapper[4755]: I0202 23:07:50.426631 4755 generic.go:334] "Generic (PLEG): container finished" podID="98021307-b099-4546-9be4-4c9910160dee" containerID="c648f3ddce1439cf96c0cf86777425b73ba9b6dbcc2cfaccf3bcecacd0a979c3" exitCode=0 Feb 02 23:07:50 crc kubenswrapper[4755]: I0202 23:07:50.426701 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vrt7r" event={"ID":"98021307-b099-4546-9be4-4c9910160dee","Type":"ContainerDied","Data":"c648f3ddce1439cf96c0cf86777425b73ba9b6dbcc2cfaccf3bcecacd0a979c3"} Feb 02 23:07:51 crc kubenswrapper[4755]: I0202 23:07:51.449282 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vrt7r" event={"ID":"98021307-b099-4546-9be4-4c9910160dee","Type":"ContainerStarted","Data":"cf1ed359bd8b519734735936e244014adb9b078cd33cb1c0fff9007724283919"} Feb 02 23:07:51 crc kubenswrapper[4755]: I0202 23:07:51.484301 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vrt7r" podStartSLOduration=3.0045223 podStartE2EDuration="9.484280765s" 
podCreationTimestamp="2026-02-02 23:07:42 +0000 UTC" firstStartedPulling="2026-02-02 23:07:44.350053365 +0000 UTC m=+2020.041273681" lastFinishedPulling="2026-02-02 23:07:50.82981181 +0000 UTC m=+2026.521032146" observedRunningTime="2026-02-02 23:07:51.475129778 +0000 UTC m=+2027.166350104" watchObservedRunningTime="2026-02-02 23:07:51.484280765 +0000 UTC m=+2027.175501091" Feb 02 23:07:52 crc kubenswrapper[4755]: I0202 23:07:52.621692 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lzsnv" Feb 02 23:07:52 crc kubenswrapper[4755]: I0202 23:07:52.622146 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lzsnv" Feb 02 23:07:53 crc kubenswrapper[4755]: I0202 23:07:53.165343 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vrt7r" Feb 02 23:07:53 crc kubenswrapper[4755]: I0202 23:07:53.165411 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vrt7r" Feb 02 23:07:53 crc kubenswrapper[4755]: I0202 23:07:53.390078 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 23:07:53 crc kubenswrapper[4755]: I0202 23:07:53.390157 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 23:07:53 crc kubenswrapper[4755]: I0202 23:07:53.674889 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-lzsnv" podUID="28fbc970-ee8c-43bc-b5ea-5e6e7a86a855" containerName="registry-server" probeResult="failure" output=< Feb 02 23:07:53 crc kubenswrapper[4755]: timeout: failed to connect service ":50051" within 1s Feb 02 23:07:53 crc kubenswrapper[4755]: > Feb 02 23:07:54 crc kubenswrapper[4755]: I0202 23:07:54.240165 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-vrt7r" podUID="98021307-b099-4546-9be4-4c9910160dee" containerName="registry-server" probeResult="failure" output=< Feb 02 23:07:54 crc kubenswrapper[4755]: timeout: failed to connect service ":50051" within 1s Feb 02 23:07:54 crc kubenswrapper[4755]: > Feb 02 23:08:01 crc kubenswrapper[4755]: I0202 23:08:01.573540 4755 generic.go:334] "Generic (PLEG): container finished" podID="50b91f5b-54be-49cb-bfff-e2d317db916e" containerID="99c0324a039247a67040b166cc8b7feaac0855d20a39390f52dd12a0cb411b66" exitCode=0 Feb 02 23:08:01 crc kubenswrapper[4755]: I0202 23:08:01.573815 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" event={"ID":"50b91f5b-54be-49cb-bfff-e2d317db916e","Type":"ContainerDied","Data":"99c0324a039247a67040b166cc8b7feaac0855d20a39390f52dd12a0cb411b66"} Feb 02 23:08:02 crc kubenswrapper[4755]: I0202 23:08:02.712021 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lzsnv" Feb 02 23:08:02 crc kubenswrapper[4755]: I0202 
23:08:02.778408 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lzsnv" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.119077 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.221260 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vrt7r" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.233311 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-inventory\") pod \"50b91f5b-54be-49cb-bfff-e2d317db916e\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.233405 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-telemetry-combined-ca-bundle\") pod \"50b91f5b-54be-49cb-bfff-e2d317db916e\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.233479 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-bootstrap-combined-ca-bundle\") pod \"50b91f5b-54be-49cb-bfff-e2d317db916e\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.233602 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"50b91f5b-54be-49cb-bfff-e2d317db916e\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.233635 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-neutron-metadata-combined-ca-bundle\") pod \"50b91f5b-54be-49cb-bfff-e2d317db916e\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.233667 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-nova-combined-ca-bundle\") pod \"50b91f5b-54be-49cb-bfff-e2d317db916e\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.233715 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hwfcc\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-kube-api-access-hwfcc\") pod \"50b91f5b-54be-49cb-bfff-e2d317db916e\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.233789 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"50b91f5b-54be-49cb-bfff-e2d317db916e\" 
(UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.233820 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"50b91f5b-54be-49cb-bfff-e2d317db916e\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.233848 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-repo-setup-combined-ca-bundle\") pod \"50b91f5b-54be-49cb-bfff-e2d317db916e\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.233909 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"50b91f5b-54be-49cb-bfff-e2d317db916e\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.233952 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-ssh-key-openstack-edpm-ipam\") pod \"50b91f5b-54be-49cb-bfff-e2d317db916e\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.234008 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-ovn-combined-ca-bundle\") pod \"50b91f5b-54be-49cb-bfff-e2d317db916e\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.234039 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-libvirt-combined-ca-bundle\") pod \"50b91f5b-54be-49cb-bfff-e2d317db916e\" (UID: \"50b91f5b-54be-49cb-bfff-e2d317db916e\") " Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.247417 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "50b91f5b-54be-49cb-bfff-e2d317db916e" (UID: "50b91f5b-54be-49cb-bfff-e2d317db916e"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.247545 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "50b91f5b-54be-49cb-bfff-e2d317db916e" (UID: "50b91f5b-54be-49cb-bfff-e2d317db916e"). InnerVolumeSpecName "nova-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.247901 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "50b91f5b-54be-49cb-bfff-e2d317db916e" (UID: "50b91f5b-54be-49cb-bfff-e2d317db916e"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.248042 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "50b91f5b-54be-49cb-bfff-e2d317db916e" (UID: "50b91f5b-54be-49cb-bfff-e2d317db916e"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.248085 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-kube-api-access-hwfcc" (OuterVolumeSpecName: "kube-api-access-hwfcc") pod "50b91f5b-54be-49cb-bfff-e2d317db916e" (UID: "50b91f5b-54be-49cb-bfff-e2d317db916e"). InnerVolumeSpecName "kube-api-access-hwfcc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.248181 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "50b91f5b-54be-49cb-bfff-e2d317db916e" (UID: "50b91f5b-54be-49cb-bfff-e2d317db916e"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.248860 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "50b91f5b-54be-49cb-bfff-e2d317db916e" (UID: "50b91f5b-54be-49cb-bfff-e2d317db916e"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.251663 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "50b91f5b-54be-49cb-bfff-e2d317db916e" (UID: "50b91f5b-54be-49cb-bfff-e2d317db916e"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.254969 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "50b91f5b-54be-49cb-bfff-e2d317db916e" (UID: "50b91f5b-54be-49cb-bfff-e2d317db916e"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.257051 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "50b91f5b-54be-49cb-bfff-e2d317db916e" (UID: "50b91f5b-54be-49cb-bfff-e2d317db916e"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.258348 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "50b91f5b-54be-49cb-bfff-e2d317db916e" (UID: "50b91f5b-54be-49cb-bfff-e2d317db916e"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.261963 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "50b91f5b-54be-49cb-bfff-e2d317db916e" (UID: "50b91f5b-54be-49cb-bfff-e2d317db916e"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.283671 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "50b91f5b-54be-49cb-bfff-e2d317db916e" (UID: "50b91f5b-54be-49cb-bfff-e2d317db916e"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.294743 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-inventory" (OuterVolumeSpecName: "inventory") pod "50b91f5b-54be-49cb-bfff-e2d317db916e" (UID: "50b91f5b-54be-49cb-bfff-e2d317db916e"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.305869 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vrt7r" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.339572 4755 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.339615 4755 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.339635 4755 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.339652 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hwfcc\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-kube-api-access-hwfcc\") on node \"crc\" DevicePath \"\"" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.339669 4755 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.339688 4755 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.339704 4755 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.339720 4755 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/50b91f5b-54be-49cb-bfff-e2d317db916e-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.339760 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.339778 4755 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.339792 4755 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-libvirt-combined-ca-bundle\") 
on node \"crc\" DevicePath \"\"" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.339808 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.339823 4755 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.339838 4755 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50b91f5b-54be-49cb-bfff-e2d317db916e-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.598626 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" event={"ID":"50b91f5b-54be-49cb-bfff-e2d317db916e","Type":"ContainerDied","Data":"68f945dde397d075ba06bcd0ff8ff3288d9884ca7a6fb767ffd4153f4ee93a37"} Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.599320 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="68f945dde397d075ba06bcd0ff8ff3288d9884ca7a6fb767ffd4153f4ee93a37" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.599049 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.747201 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt"] Feb 02 23:08:03 crc kubenswrapper[4755]: E0202 23:08:03.747921 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50b91f5b-54be-49cb-bfff-e2d317db916e" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.747950 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="50b91f5b-54be-49cb-bfff-e2d317db916e" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.748334 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="50b91f5b-54be-49cb-bfff-e2d317db916e" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.749579 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.752161 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.752383 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-n24hl" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.752541 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.757559 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.757684 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.761574 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt"] Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.850103 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-z6lpt\" (UID: \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.850146 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f85cq\" (UniqueName: \"kubernetes.io/projected/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-kube-api-access-f85cq\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-z6lpt\" (UID: \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.850229 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-z6lpt\" (UID: \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.850394 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-ssh-key-openstack-edpm-ipam\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-z6lpt\" (UID: \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.850413 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-z6lpt\" (UID: \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.952235 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-z6lpt\" (UID: \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.952492 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-ssh-key-openstack-edpm-ipam\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-z6lpt\" (UID: \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.952523 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-z6lpt\" (UID: \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.952557 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-z6lpt\" (UID: \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.952589 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f85cq\" (UniqueName: \"kubernetes.io/projected/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-kube-api-access-f85cq\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-z6lpt\" (UID: \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.953115 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-z6lpt\" (UID: \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.959357 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-z6lpt\" (UID: \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.959473 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-ssh-key-openstack-edpm-ipam\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-z6lpt\" (UID: \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.965371 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-z6lpt\" (UID: \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt" Feb 02 23:08:03 crc kubenswrapper[4755]: I0202 23:08:03.980363 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f85cq\" (UniqueName: \"kubernetes.io/projected/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-kube-api-access-f85cq\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-z6lpt\" (UID: \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt" Feb 02 23:08:04 crc kubenswrapper[4755]: I0202 23:08:04.093944 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt" Feb 02 23:08:04 crc kubenswrapper[4755]: I0202 23:08:04.688620 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt"] Feb 02 23:08:04 crc kubenswrapper[4755]: W0202 23:08:04.699930 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd9ce0f10_bc90_4f02_8c98_0b1e054c026f.slice/crio-f4f473089cba5f00450a0ad26a5c2a343e0d0098d8895b6e8d9269583043c70b WatchSource:0}: Error finding container f4f473089cba5f00450a0ad26a5c2a343e0d0098d8895b6e8d9269583043c70b: Status 404 returned error can't find the container with id f4f473089cba5f00450a0ad26a5c2a343e0d0098d8895b6e8d9269583043c70b Feb 02 23:08:05 crc kubenswrapper[4755]: I0202 23:08:05.131686 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 02 23:08:05 crc kubenswrapper[4755]: I0202 23:08:05.622182 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt" event={"ID":"d9ce0f10-bc90-4f02-8c98-0b1e054c026f","Type":"ContainerStarted","Data":"b2e646ba02fffbbcbe4b7123af03d41a34362d0a9a24a80dcea29a4b67d2ffe4"} Feb 02 23:08:05 crc kubenswrapper[4755]: I0202 23:08:05.622762 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt" event={"ID":"d9ce0f10-bc90-4f02-8c98-0b1e054c026f","Type":"ContainerStarted","Data":"f4f473089cba5f00450a0ad26a5c2a343e0d0098d8895b6e8d9269583043c70b"} Feb 02 23:08:05 crc kubenswrapper[4755]: I0202 23:08:05.646671 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt" podStartSLOduration=2.224599008 podStartE2EDuration="2.646568779s" podCreationTimestamp="2026-02-02 23:08:03 +0000 UTC" firstStartedPulling="2026-02-02 23:08:04.704103489 +0000 UTC m=+2040.395323825" lastFinishedPulling="2026-02-02 23:08:05.12607325 +0000 UTC m=+2040.817293596" observedRunningTime="2026-02-02 23:08:05.644537922 +0000 UTC m=+2041.335758248" watchObservedRunningTime="2026-02-02 23:08:05.646568779 +0000 UTC m=+2041.337789105" Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.025886 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vrt7r"] Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.026629 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vrt7r" podUID="98021307-b099-4546-9be4-4c9910160dee" containerName="registry-server" 
containerID="cri-o://cf1ed359bd8b519734735936e244014adb9b078cd33cb1c0fff9007724283919" gracePeriod=2 Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.603614 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vrt7r" Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.644163 4755 generic.go:334] "Generic (PLEG): container finished" podID="98021307-b099-4546-9be4-4c9910160dee" containerID="cf1ed359bd8b519734735936e244014adb9b078cd33cb1c0fff9007724283919" exitCode=0 Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.644440 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vrt7r" event={"ID":"98021307-b099-4546-9be4-4c9910160dee","Type":"ContainerDied","Data":"cf1ed359bd8b519734735936e244014adb9b078cd33cb1c0fff9007724283919"} Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.644539 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vrt7r" event={"ID":"98021307-b099-4546-9be4-4c9910160dee","Type":"ContainerDied","Data":"db8ea09a30c76b7acf9eca28f0ac5bb37a437b43f0821a77914672b98939944e"} Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.644627 4755 scope.go:117] "RemoveContainer" containerID="cf1ed359bd8b519734735936e244014adb9b078cd33cb1c0fff9007724283919" Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.644875 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vrt7r" Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.679412 4755 scope.go:117] "RemoveContainer" containerID="c648f3ddce1439cf96c0cf86777425b73ba9b6dbcc2cfaccf3bcecacd0a979c3" Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.716016 4755 scope.go:117] "RemoveContainer" containerID="f730505a6dbedea19e24141fb4041c090a9598a799328a0a9dea86c3e2385b3c" Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.736220 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98021307-b099-4546-9be4-4c9910160dee-catalog-content\") pod \"98021307-b099-4546-9be4-4c9910160dee\" (UID: \"98021307-b099-4546-9be4-4c9910160dee\") " Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.736476 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98021307-b099-4546-9be4-4c9910160dee-utilities\") pod \"98021307-b099-4546-9be4-4c9910160dee\" (UID: \"98021307-b099-4546-9be4-4c9910160dee\") " Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.736631 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4jr5\" (UniqueName: \"kubernetes.io/projected/98021307-b099-4546-9be4-4c9910160dee-kube-api-access-t4jr5\") pod \"98021307-b099-4546-9be4-4c9910160dee\" (UID: \"98021307-b099-4546-9be4-4c9910160dee\") " Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.738919 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98021307-b099-4546-9be4-4c9910160dee-utilities" (OuterVolumeSpecName: "utilities") pod "98021307-b099-4546-9be4-4c9910160dee" (UID: "98021307-b099-4546-9be4-4c9910160dee"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.746509 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98021307-b099-4546-9be4-4c9910160dee-kube-api-access-t4jr5" (OuterVolumeSpecName: "kube-api-access-t4jr5") pod "98021307-b099-4546-9be4-4c9910160dee" (UID: "98021307-b099-4546-9be4-4c9910160dee"). InnerVolumeSpecName "kube-api-access-t4jr5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.820490 4755 scope.go:117] "RemoveContainer" containerID="cf1ed359bd8b519734735936e244014adb9b078cd33cb1c0fff9007724283919" Feb 02 23:08:07 crc kubenswrapper[4755]: E0202 23:08:07.823288 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf1ed359bd8b519734735936e244014adb9b078cd33cb1c0fff9007724283919\": container with ID starting with cf1ed359bd8b519734735936e244014adb9b078cd33cb1c0fff9007724283919 not found: ID does not exist" containerID="cf1ed359bd8b519734735936e244014adb9b078cd33cb1c0fff9007724283919" Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.823326 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf1ed359bd8b519734735936e244014adb9b078cd33cb1c0fff9007724283919"} err="failed to get container status \"cf1ed359bd8b519734735936e244014adb9b078cd33cb1c0fff9007724283919\": rpc error: code = NotFound desc = could not find container \"cf1ed359bd8b519734735936e244014adb9b078cd33cb1c0fff9007724283919\": container with ID starting with cf1ed359bd8b519734735936e244014adb9b078cd33cb1c0fff9007724283919 not found: ID does not exist" Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.823349 4755 scope.go:117] "RemoveContainer" containerID="c648f3ddce1439cf96c0cf86777425b73ba9b6dbcc2cfaccf3bcecacd0a979c3" Feb 02 23:08:07 crc kubenswrapper[4755]: E0202 23:08:07.823962 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c648f3ddce1439cf96c0cf86777425b73ba9b6dbcc2cfaccf3bcecacd0a979c3\": container with ID starting with c648f3ddce1439cf96c0cf86777425b73ba9b6dbcc2cfaccf3bcecacd0a979c3 not found: ID does not exist" containerID="c648f3ddce1439cf96c0cf86777425b73ba9b6dbcc2cfaccf3bcecacd0a979c3" Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.824011 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c648f3ddce1439cf96c0cf86777425b73ba9b6dbcc2cfaccf3bcecacd0a979c3"} err="failed to get container status \"c648f3ddce1439cf96c0cf86777425b73ba9b6dbcc2cfaccf3bcecacd0a979c3\": rpc error: code = NotFound desc = could not find container \"c648f3ddce1439cf96c0cf86777425b73ba9b6dbcc2cfaccf3bcecacd0a979c3\": container with ID starting with c648f3ddce1439cf96c0cf86777425b73ba9b6dbcc2cfaccf3bcecacd0a979c3 not found: ID does not exist" Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.824048 4755 scope.go:117] "RemoveContainer" containerID="f730505a6dbedea19e24141fb4041c090a9598a799328a0a9dea86c3e2385b3c" Feb 02 23:08:07 crc kubenswrapper[4755]: E0202 23:08:07.824431 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f730505a6dbedea19e24141fb4041c090a9598a799328a0a9dea86c3e2385b3c\": container with ID starting with f730505a6dbedea19e24141fb4041c090a9598a799328a0a9dea86c3e2385b3c not found: ID does not 
exist" containerID="f730505a6dbedea19e24141fb4041c090a9598a799328a0a9dea86c3e2385b3c" Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.824453 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f730505a6dbedea19e24141fb4041c090a9598a799328a0a9dea86c3e2385b3c"} err="failed to get container status \"f730505a6dbedea19e24141fb4041c090a9598a799328a0a9dea86c3e2385b3c\": rpc error: code = NotFound desc = could not find container \"f730505a6dbedea19e24141fb4041c090a9598a799328a0a9dea86c3e2385b3c\": container with ID starting with f730505a6dbedea19e24141fb4041c090a9598a799328a0a9dea86c3e2385b3c not found: ID does not exist" Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.839165 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98021307-b099-4546-9be4-4c9910160dee-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.839189 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4jr5\" (UniqueName: \"kubernetes.io/projected/98021307-b099-4546-9be4-4c9910160dee-kube-api-access-t4jr5\") on node \"crc\" DevicePath \"\"" Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.855892 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98021307-b099-4546-9be4-4c9910160dee-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "98021307-b099-4546-9be4-4c9910160dee" (UID: "98021307-b099-4546-9be4-4c9910160dee"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.941024 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98021307-b099-4546-9be4-4c9910160dee-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 23:08:07 crc kubenswrapper[4755]: I0202 23:08:07.989164 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vrt7r"] Feb 02 23:08:08 crc kubenswrapper[4755]: I0202 23:08:08.002013 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vrt7r"] Feb 02 23:08:09 crc kubenswrapper[4755]: I0202 23:08:09.110025 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98021307-b099-4546-9be4-4c9910160dee" path="/var/lib/kubelet/pods/98021307-b099-4546-9be4-4c9910160dee/volumes" Feb 02 23:08:09 crc kubenswrapper[4755]: I0202 23:08:09.811612 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lzsnv"] Feb 02 23:08:09 crc kubenswrapper[4755]: I0202 23:08:09.812295 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-lzsnv" podUID="28fbc970-ee8c-43bc-b5ea-5e6e7a86a855" containerName="registry-server" containerID="cri-o://cd706975731ac48445b678eae864382b49c1ea268aa534484eac2a042d8a9535" gracePeriod=2 Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.363167 4755 util.go:48] "No ready sandbox for pod can be found. 
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.363167 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lzsnv"
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.491959 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28fbc970-ee8c-43bc-b5ea-5e6e7a86a855-utilities\") pod \"28fbc970-ee8c-43bc-b5ea-5e6e7a86a855\" (UID: \"28fbc970-ee8c-43bc-b5ea-5e6e7a86a855\") "
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.492300 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-84nq2\" (UniqueName: \"kubernetes.io/projected/28fbc970-ee8c-43bc-b5ea-5e6e7a86a855-kube-api-access-84nq2\") pod \"28fbc970-ee8c-43bc-b5ea-5e6e7a86a855\" (UID: \"28fbc970-ee8c-43bc-b5ea-5e6e7a86a855\") "
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.492404 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28fbc970-ee8c-43bc-b5ea-5e6e7a86a855-catalog-content\") pod \"28fbc970-ee8c-43bc-b5ea-5e6e7a86a855\" (UID: \"28fbc970-ee8c-43bc-b5ea-5e6e7a86a855\") "
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.492468 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28fbc970-ee8c-43bc-b5ea-5e6e7a86a855-utilities" (OuterVolumeSpecName: "utilities") pod "28fbc970-ee8c-43bc-b5ea-5e6e7a86a855" (UID: "28fbc970-ee8c-43bc-b5ea-5e6e7a86a855"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.492966 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28fbc970-ee8c-43bc-b5ea-5e6e7a86a855-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.500628 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28fbc970-ee8c-43bc-b5ea-5e6e7a86a855-kube-api-access-84nq2" (OuterVolumeSpecName: "kube-api-access-84nq2") pod "28fbc970-ee8c-43bc-b5ea-5e6e7a86a855" (UID: "28fbc970-ee8c-43bc-b5ea-5e6e7a86a855"). InnerVolumeSpecName "kube-api-access-84nq2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.511377 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28fbc970-ee8c-43bc-b5ea-5e6e7a86a855-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "28fbc970-ee8c-43bc-b5ea-5e6e7a86a855" (UID: "28fbc970-ee8c-43bc-b5ea-5e6e7a86a855"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.595197 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28fbc970-ee8c-43bc-b5ea-5e6e7a86a855-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.595235 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-84nq2\" (UniqueName: \"kubernetes.io/projected/28fbc970-ee8c-43bc-b5ea-5e6e7a86a855-kube-api-access-84nq2\") on node \"crc\" DevicePath \"\""
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.680557 4755 generic.go:334] "Generic (PLEG): container finished" podID="28fbc970-ee8c-43bc-b5ea-5e6e7a86a855" containerID="cd706975731ac48445b678eae864382b49c1ea268aa534484eac2a042d8a9535" exitCode=0
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.680602 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lzsnv" event={"ID":"28fbc970-ee8c-43bc-b5ea-5e6e7a86a855","Type":"ContainerDied","Data":"cd706975731ac48445b678eae864382b49c1ea268aa534484eac2a042d8a9535"}
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.680632 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lzsnv" event={"ID":"28fbc970-ee8c-43bc-b5ea-5e6e7a86a855","Type":"ContainerDied","Data":"df14e30650b4d7b2342d034fe2a14fbfb46b06341c6c08b2f3c9fd8d2784f714"}
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.680656 4755 scope.go:117] "RemoveContainer" containerID="cd706975731ac48445b678eae864382b49c1ea268aa534484eac2a042d8a9535"
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.680816 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lzsnv"
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.724226 4755 scope.go:117] "RemoveContainer" containerID="79e5da38bf8d08c56ed10c2e9c1e9a02a85f14a2152db0fcf2075e1beb815d77"
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.730780 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lzsnv"]
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.750848 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lzsnv"]
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.755640 4755 scope.go:117] "RemoveContainer" containerID="e98488457f2d82bb9073a4f305c8dc4bddf55b6a7d758ba75ac5770fe0231f6a"
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.809797 4755 scope.go:117] "RemoveContainer" containerID="cd706975731ac48445b678eae864382b49c1ea268aa534484eac2a042d8a9535"
Feb 02 23:08:10 crc kubenswrapper[4755]: E0202 23:08:10.810244 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd706975731ac48445b678eae864382b49c1ea268aa534484eac2a042d8a9535\": container with ID starting with cd706975731ac48445b678eae864382b49c1ea268aa534484eac2a042d8a9535 not found: ID does not exist" containerID="cd706975731ac48445b678eae864382b49c1ea268aa534484eac2a042d8a9535"
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.810292 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd706975731ac48445b678eae864382b49c1ea268aa534484eac2a042d8a9535"} err="failed to get container status \"cd706975731ac48445b678eae864382b49c1ea268aa534484eac2a042d8a9535\": rpc error: code = NotFound desc = could not find container \"cd706975731ac48445b678eae864382b49c1ea268aa534484eac2a042d8a9535\": container with ID starting with cd706975731ac48445b678eae864382b49c1ea268aa534484eac2a042d8a9535 not found: ID does not exist"
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.810314 4755 scope.go:117] "RemoveContainer" containerID="79e5da38bf8d08c56ed10c2e9c1e9a02a85f14a2152db0fcf2075e1beb815d77"
Feb 02 23:08:10 crc kubenswrapper[4755]: E0202 23:08:10.810641 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79e5da38bf8d08c56ed10c2e9c1e9a02a85f14a2152db0fcf2075e1beb815d77\": container with ID starting with 79e5da38bf8d08c56ed10c2e9c1e9a02a85f14a2152db0fcf2075e1beb815d77 not found: ID does not exist" containerID="79e5da38bf8d08c56ed10c2e9c1e9a02a85f14a2152db0fcf2075e1beb815d77"
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.810703 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79e5da38bf8d08c56ed10c2e9c1e9a02a85f14a2152db0fcf2075e1beb815d77"} err="failed to get container status \"79e5da38bf8d08c56ed10c2e9c1e9a02a85f14a2152db0fcf2075e1beb815d77\": rpc error: code = NotFound desc = could not find container \"79e5da38bf8d08c56ed10c2e9c1e9a02a85f14a2152db0fcf2075e1beb815d77\": container with ID starting with 79e5da38bf8d08c56ed10c2e9c1e9a02a85f14a2152db0fcf2075e1beb815d77 not found: ID does not exist"
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.810786 4755 scope.go:117] "RemoveContainer" containerID="e98488457f2d82bb9073a4f305c8dc4bddf55b6a7d758ba75ac5770fe0231f6a"
Feb 02 23:08:10 crc kubenswrapper[4755]: E0202 23:08:10.811117 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e98488457f2d82bb9073a4f305c8dc4bddf55b6a7d758ba75ac5770fe0231f6a\": container with ID starting with e98488457f2d82bb9073a4f305c8dc4bddf55b6a7d758ba75ac5770fe0231f6a not found: ID does not exist" containerID="e98488457f2d82bb9073a4f305c8dc4bddf55b6a7d758ba75ac5770fe0231f6a"
Feb 02 23:08:10 crc kubenswrapper[4755]: I0202 23:08:10.811143 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e98488457f2d82bb9073a4f305c8dc4bddf55b6a7d758ba75ac5770fe0231f6a"} err="failed to get container status \"e98488457f2d82bb9073a4f305c8dc4bddf55b6a7d758ba75ac5770fe0231f6a\": rpc error: code = NotFound desc = could not find container \"e98488457f2d82bb9073a4f305c8dc4bddf55b6a7d758ba75ac5770fe0231f6a\": container with ID starting with e98488457f2d82bb9073a4f305c8dc4bddf55b6a7d758ba75ac5770fe0231f6a not found: ID does not exist"
Feb 02 23:08:11 crc kubenswrapper[4755]: I0202 23:08:11.091485 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28fbc970-ee8c-43bc-b5ea-5e6e7a86a855" path="/var/lib/kubelet/pods/28fbc970-ee8c-43bc-b5ea-5e6e7a86a855/volumes"
Feb 02 23:08:11 crc kubenswrapper[4755]: I0202 23:08:11.527421 4755 scope.go:117] "RemoveContainer" containerID="dde1a5ceead556147f68a30fad2b43052ba2245d8afb407b9a6768c5410426d7"
Feb 02 23:08:11 crc kubenswrapper[4755]: I0202 23:08:11.572915 4755 scope.go:117] "RemoveContainer" containerID="d06a9fe7a85e3048a48fdd4742e37222e3b98e2591e4985cc68572ab8522c6a8"
Feb 02 23:08:23 crc kubenswrapper[4755]: I0202 23:08:23.389048 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 23:08:23 crc kubenswrapper[4755]: I0202 23:08:23.390401 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 23:08:53 crc kubenswrapper[4755]: I0202 23:08:53.389644 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 23:08:53 crc kubenswrapper[4755]: I0202 23:08:53.390241 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 23:08:53 crc kubenswrapper[4755]: I0202 23:08:53.390282 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc"
Feb 02 23:08:53 crc kubenswrapper[4755]: I0202 23:08:53.391023 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"30bad18ffb9e582a32f758848770c077d9c7b99e4ba3e8c0b3b77128686a49e6"} pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 02 23:08:53 crc kubenswrapper[4755]: I0202 23:08:53.391070 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" containerID="cri-o://30bad18ffb9e582a32f758848770c077d9c7b99e4ba3e8c0b3b77128686a49e6" gracePeriod=600
Feb 02 23:08:54 crc kubenswrapper[4755]: I0202 23:08:54.239389 4755 generic.go:334] "Generic (PLEG): container finished" podID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerID="30bad18ffb9e582a32f758848770c077d9c7b99e4ba3e8c0b3b77128686a49e6" exitCode=0
Feb 02 23:08:54 crc kubenswrapper[4755]: I0202 23:08:54.240174 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerDied","Data":"30bad18ffb9e582a32f758848770c077d9c7b99e4ba3e8c0b3b77128686a49e6"}
Feb 02 23:08:54 crc kubenswrapper[4755]: I0202 23:08:54.240228 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerStarted","Data":"79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089"}
Feb 02 23:08:54 crc kubenswrapper[4755]: I0202 23:08:54.240258 4755 scope.go:117] "RemoveContainer" containerID="335394c10882d4913e90829d810a34a29f76c9ba19b1c9aa040aeabd88248e63"
Feb 02 23:09:12 crc kubenswrapper[4755]: I0202 23:09:12.458931 4755 generic.go:334] "Generic (PLEG): container finished" podID="d9ce0f10-bc90-4f02-8c98-0b1e054c026f" containerID="b2e646ba02fffbbcbe4b7123af03d41a34362d0a9a24a80dcea29a4b67d2ffe4" exitCode=0
Feb 02 23:09:12 crc kubenswrapper[4755]: I0202 23:09:12.459038 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt" event={"ID":"d9ce0f10-bc90-4f02-8c98-0b1e054c026f","Type":"ContainerDied","Data":"b2e646ba02fffbbcbe4b7123af03d41a34362d0a9a24a80dcea29a4b67d2ffe4"}
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.005436 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.069657 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-ssh-key-openstack-edpm-ipam\") pod \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\" (UID: \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\") "
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.069839 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-ovn-combined-ca-bundle\") pod \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\" (UID: \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\") "
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.069911 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-inventory\") pod \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\" (UID: \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\") "
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.069951 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-ovncontroller-config-0\") pod \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\" (UID: \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\") "
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.070124 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f85cq\" (UniqueName: \"kubernetes.io/projected/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-kube-api-access-f85cq\") pod \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\" (UID: \"d9ce0f10-bc90-4f02-8c98-0b1e054c026f\") "
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.077061 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "d9ce0f10-bc90-4f02-8c98-0b1e054c026f" (UID: "d9ce0f10-bc90-4f02-8c98-0b1e054c026f"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.095552 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-kube-api-access-f85cq" (OuterVolumeSpecName: "kube-api-access-f85cq") pod "d9ce0f10-bc90-4f02-8c98-0b1e054c026f" (UID: "d9ce0f10-bc90-4f02-8c98-0b1e054c026f"). InnerVolumeSpecName "kube-api-access-f85cq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.102911 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "d9ce0f10-bc90-4f02-8c98-0b1e054c026f" (UID: "d9ce0f10-bc90-4f02-8c98-0b1e054c026f"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.107871 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "d9ce0f10-bc90-4f02-8c98-0b1e054c026f" (UID: "d9ce0f10-bc90-4f02-8c98-0b1e054c026f"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.115952 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-inventory" (OuterVolumeSpecName: "inventory") pod "d9ce0f10-bc90-4f02-8c98-0b1e054c026f" (UID: "d9ce0f10-bc90-4f02-8c98-0b1e054c026f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.173682 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f85cq\" (UniqueName: \"kubernetes.io/projected/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-kube-api-access-f85cq\") on node \"crc\" DevicePath \"\""
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.173787 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.173812 4755 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.173831 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-inventory\") on node \"crc\" DevicePath \"\""
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.173850 4755 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/d9ce0f10-bc90-4f02-8c98-0b1e054c026f-ovncontroller-config-0\") on node \"crc\" DevicePath \"\""
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.483026 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt" event={"ID":"d9ce0f10-bc90-4f02-8c98-0b1e054c026f","Type":"ContainerDied","Data":"f4f473089cba5f00450a0ad26a5c2a343e0d0098d8895b6e8d9269583043c70b"}
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.483066 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f4f473089cba5f00450a0ad26a5c2a343e0d0098d8895b6e8d9269583043c70b"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.483119 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-z6lpt"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.606543 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n"]
Feb 02 23:09:14 crc kubenswrapper[4755]: E0202 23:09:14.607080 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9ce0f10-bc90-4f02-8c98-0b1e054c026f" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.607103 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9ce0f10-bc90-4f02-8c98-0b1e054c026f" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Feb 02 23:09:14 crc kubenswrapper[4755]: E0202 23:09:14.607128 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28fbc970-ee8c-43bc-b5ea-5e6e7a86a855" containerName="extract-utilities"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.607138 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="28fbc970-ee8c-43bc-b5ea-5e6e7a86a855" containerName="extract-utilities"
Feb 02 23:09:14 crc kubenswrapper[4755]: E0202 23:09:14.607156 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28fbc970-ee8c-43bc-b5ea-5e6e7a86a855" containerName="registry-server"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.607165 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="28fbc970-ee8c-43bc-b5ea-5e6e7a86a855" containerName="registry-server"
Feb 02 23:09:14 crc kubenswrapper[4755]: E0202 23:09:14.607189 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28fbc970-ee8c-43bc-b5ea-5e6e7a86a855" containerName="extract-content"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.607197 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="28fbc970-ee8c-43bc-b5ea-5e6e7a86a855" containerName="extract-content"
Feb 02 23:09:14 crc kubenswrapper[4755]: E0202 23:09:14.607212 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98021307-b099-4546-9be4-4c9910160dee" containerName="registry-server"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.607221 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="98021307-b099-4546-9be4-4c9910160dee" containerName="registry-server"
Feb 02 23:09:14 crc kubenswrapper[4755]: E0202 23:09:14.607244 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98021307-b099-4546-9be4-4c9910160dee" containerName="extract-content"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.607252 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="98021307-b099-4546-9be4-4c9910160dee" containerName="extract-content"
Feb 02 23:09:14 crc kubenswrapper[4755]: E0202 23:09:14.607275 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98021307-b099-4546-9be4-4c9910160dee" containerName="extract-utilities"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.607284 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="98021307-b099-4546-9be4-4c9910160dee" containerName="extract-utilities"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.607573 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="28fbc970-ee8c-43bc-b5ea-5e6e7a86a855" containerName="registry-server"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.607608 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9ce0f10-bc90-4f02-8c98-0b1e054c026f" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.607631 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="98021307-b099-4546-9be4-4c9910160dee" containerName="registry-server"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.608561 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.615185 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.615250 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.615251 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.615577 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-n24hl"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.615685 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.615801 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.624375 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n"]
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.686068 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.686414 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.686525 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-ssh-key-openstack-edpm-ipam\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n"
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.686675 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwz87\" (UniqueName: \"kubernetes.io/projected/fb61c726-e017-45d6-a3d2-883f93e04eb8-kube-api-access-jwz87\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n"
\"fb61c726-e017-45d6-a3d2-883f93e04eb8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n" Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.686810 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n" Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.686928 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n" Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.789214 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n" Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.789263 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-ssh-key-openstack-edpm-ipam\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n" Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.789292 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwz87\" (UniqueName: \"kubernetes.io/projected/fb61c726-e017-45d6-a3d2-883f93e04eb8-kube-api-access-jwz87\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n" Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.789314 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n" Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.789353 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n" Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.789464 4755 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n" Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.794981 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n" Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.795612 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n" Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.796227 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n" Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.804291 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-ssh-key-openstack-edpm-ipam\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n" Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.815407 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n" Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.850783 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwz87\" (UniqueName: \"kubernetes.io/projected/fb61c726-e017-45d6-a3d2-883f93e04eb8-kube-api-access-jwz87\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n" Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.928359 4755 util.go:30] "No sandbox for pod can be found. 
Feb 02 23:09:14 crc kubenswrapper[4755]: I0202 23:09:14.928359 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n"
Feb 02 23:09:15 crc kubenswrapper[4755]: W0202 23:09:15.496052 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfb61c726_e017_45d6_a3d2_883f93e04eb8.slice/crio-88fc5cd444b9df9cea93d2d14323d2fceb23366d263acd7263cb76a4497aa27a WatchSource:0}: Error finding container 88fc5cd444b9df9cea93d2d14323d2fceb23366d263acd7263cb76a4497aa27a: Status 404 returned error can't find the container with id 88fc5cd444b9df9cea93d2d14323d2fceb23366d263acd7263cb76a4497aa27a
Feb 02 23:09:15 crc kubenswrapper[4755]: I0202 23:09:15.496863 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n"]
Feb 02 23:09:16 crc kubenswrapper[4755]: I0202 23:09:16.502407 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n" event={"ID":"fb61c726-e017-45d6-a3d2-883f93e04eb8","Type":"ContainerStarted","Data":"3425ee520fd9c4873bdcb626a6715737912ac3b3242830f8252815637add3b44"}
Feb 02 23:09:16 crc kubenswrapper[4755]: I0202 23:09:16.503105 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n" event={"ID":"fb61c726-e017-45d6-a3d2-883f93e04eb8","Type":"ContainerStarted","Data":"88fc5cd444b9df9cea93d2d14323d2fceb23366d263acd7263cb76a4497aa27a"}
Feb 02 23:09:16 crc kubenswrapper[4755]: I0202 23:09:16.537981 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n" podStartSLOduration=2.05140077 podStartE2EDuration="2.537959265s" podCreationTimestamp="2026-02-02 23:09:14 +0000 UTC" firstStartedPulling="2026-02-02 23:09:15.499062135 +0000 UTC m=+2111.190282461" lastFinishedPulling="2026-02-02 23:09:15.98562064 +0000 UTC m=+2111.676840956" observedRunningTime="2026-02-02 23:09:16.51681885 +0000 UTC m=+2112.208039186" watchObservedRunningTime="2026-02-02 23:09:16.537959265 +0000 UTC m=+2112.229179611"
Feb 02 23:10:06 crc kubenswrapper[4755]: I0202 23:10:06.122098 4755 generic.go:334] "Generic (PLEG): container finished" podID="fb61c726-e017-45d6-a3d2-883f93e04eb8" containerID="3425ee520fd9c4873bdcb626a6715737912ac3b3242830f8252815637add3b44" exitCode=0
Feb 02 23:10:06 crc kubenswrapper[4755]: I0202 23:10:06.122212 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n" event={"ID":"fb61c726-e017-45d6-a3d2-883f93e04eb8","Type":"ContainerDied","Data":"3425ee520fd9c4873bdcb626a6715737912ac3b3242830f8252815637add3b44"}
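The pod_startup_latency_tracker entry above (and the two later ones for the libvirt and certified-operators pods) packs four timestamps into one line, and they obey a simple identity: podStartSLOduration is podStartE2EDuration minus the image-pull window (lastFinishedPulling minus firstStartedPulling), i.e. the startup SLI excludes time spent pulling images. A quick Go check against the monotonic (m=+...) readings copied verbatim from the entry:

package main

import "fmt"

// Verifies podStartSLOduration = podStartE2EDuration - image pull window
// for the neutron-metadata pod, using the kubelet's monotonic readings.
func main() {
	const (
		e2e       = 2.537959265    // podStartE2EDuration, seconds
		pullStart = 2111.190282461 // firstStartedPulling, m=+... seconds
		pullEnd   = 2111.676840956 // lastFinishedPulling, m=+... seconds
	)
	fmt.Printf("%.8f\n", e2e-(pullEnd-pullStart)) // prints ~2.05140077 = podStartSLOduration
}

The same arithmetic reproduces the later entries: 18.324678782 - 0.991955821 = 17.332722961 for the libvirt pod and 6.501714878 - 3.444004680 = 3.057710198 for certified-operators-cgvql.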
Feb 02 23:10:07 crc kubenswrapper[4755]: I0202 23:10:07.746552 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n"
Feb 02 23:10:07 crc kubenswrapper[4755]: I0202 23:10:07.828315 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"fb61c726-e017-45d6-a3d2-883f93e04eb8\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") "
Feb 02 23:10:07 crc kubenswrapper[4755]: I0202 23:10:07.828900 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jwz87\" (UniqueName: \"kubernetes.io/projected/fb61c726-e017-45d6-a3d2-883f93e04eb8-kube-api-access-jwz87\") pod \"fb61c726-e017-45d6-a3d2-883f93e04eb8\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") "
Feb 02 23:10:07 crc kubenswrapper[4755]: I0202 23:10:07.829147 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-inventory\") pod \"fb61c726-e017-45d6-a3d2-883f93e04eb8\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") "
Feb 02 23:10:07 crc kubenswrapper[4755]: I0202 23:10:07.829344 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-neutron-metadata-combined-ca-bundle\") pod \"fb61c726-e017-45d6-a3d2-883f93e04eb8\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") "
Feb 02 23:10:07 crc kubenswrapper[4755]: I0202 23:10:07.829663 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-ssh-key-openstack-edpm-ipam\") pod \"fb61c726-e017-45d6-a3d2-883f93e04eb8\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") "
Feb 02 23:10:07 crc kubenswrapper[4755]: I0202 23:10:07.829901 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-nova-metadata-neutron-config-0\") pod \"fb61c726-e017-45d6-a3d2-883f93e04eb8\" (UID: \"fb61c726-e017-45d6-a3d2-883f93e04eb8\") "
Feb 02 23:10:07 crc kubenswrapper[4755]: I0202 23:10:07.834577 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "fb61c726-e017-45d6-a3d2-883f93e04eb8" (UID: "fb61c726-e017-45d6-a3d2-883f93e04eb8"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:10:07 crc kubenswrapper[4755]: I0202 23:10:07.835009 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb61c726-e017-45d6-a3d2-883f93e04eb8-kube-api-access-jwz87" (OuterVolumeSpecName: "kube-api-access-jwz87") pod "fb61c726-e017-45d6-a3d2-883f93e04eb8" (UID: "fb61c726-e017-45d6-a3d2-883f93e04eb8"). InnerVolumeSpecName "kube-api-access-jwz87". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 23:10:07 crc kubenswrapper[4755]: I0202 23:10:07.858999 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "fb61c726-e017-45d6-a3d2-883f93e04eb8" (UID: "fb61c726-e017-45d6-a3d2-883f93e04eb8"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:10:07 crc kubenswrapper[4755]: I0202 23:10:07.867471 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "fb61c726-e017-45d6-a3d2-883f93e04eb8" (UID: "fb61c726-e017-45d6-a3d2-883f93e04eb8"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:10:07 crc kubenswrapper[4755]: I0202 23:10:07.895607 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-inventory" (OuterVolumeSpecName: "inventory") pod "fb61c726-e017-45d6-a3d2-883f93e04eb8" (UID: "fb61c726-e017-45d6-a3d2-883f93e04eb8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:10:07 crc kubenswrapper[4755]: I0202 23:10:07.896323 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "fb61c726-e017-45d6-a3d2-883f93e04eb8" (UID: "fb61c726-e017-45d6-a3d2-883f93e04eb8"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:10:07 crc kubenswrapper[4755]: I0202 23:10:07.932615 4755 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\""
Feb 02 23:10:07 crc kubenswrapper[4755]: I0202 23:10:07.932657 4755 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\""
Feb 02 23:10:07 crc kubenswrapper[4755]: I0202 23:10:07.932675 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jwz87\" (UniqueName: \"kubernetes.io/projected/fb61c726-e017-45d6-a3d2-883f93e04eb8-kube-api-access-jwz87\") on node \"crc\" DevicePath \"\""
Feb 02 23:10:07 crc kubenswrapper[4755]: I0202 23:10:07.932690 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-inventory\") on node \"crc\" DevicePath \"\""
Feb 02 23:10:07 crc kubenswrapper[4755]: I0202 23:10:07.932703 4755 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 23:10:07 crc kubenswrapper[4755]: I0202 23:10:07.932716 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fb61c726-e017-45d6-a3d2-883f93e04eb8-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.152001 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n" event={"ID":"fb61c726-e017-45d6-a3d2-883f93e04eb8","Type":"ContainerDied","Data":"88fc5cd444b9df9cea93d2d14323d2fceb23366d263acd7263cb76a4497aa27a"}
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.152048 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88fc5cd444b9df9cea93d2d14323d2fceb23366d263acd7263cb76a4497aa27a"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.152110 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.336650 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz"]
Feb 02 23:10:08 crc kubenswrapper[4755]: E0202 23:10:08.337720 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb61c726-e017-45d6-a3d2-883f93e04eb8" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.337838 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb61c726-e017-45d6-a3d2-883f93e04eb8" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.338182 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb61c726-e017-45d6-a3d2-883f93e04eb8" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.339212 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.344785 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.344931 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.345046 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-n24hl"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.345182 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.345407 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.346021 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz"]
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.451374 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jk47z\" (UniqueName: \"kubernetes.io/projected/b7b416e0-c078-47ba-97e8-f7c16294e8e5-kube-api-access-jk47z\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-gncrz\" (UID: \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.451697 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-ssh-key-openstack-edpm-ipam\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-gncrz\" (UID: \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.451849 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-gncrz\" (UID: \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.451947 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-gncrz\" (UID: \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.452102 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-gncrz\" (UID: \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.554274 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-gncrz\" (UID: \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.554372 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-gncrz\" (UID: \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.554458 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-gncrz\" (UID: \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.554762 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jk47z\" (UniqueName: \"kubernetes.io/projected/b7b416e0-c078-47ba-97e8-f7c16294e8e5-kube-api-access-jk47z\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-gncrz\" (UID: \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.555557 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-ssh-key-openstack-edpm-ipam\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-gncrz\" (UID: \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.560269 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-ssh-key-openstack-edpm-ipam\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-gncrz\" (UID: \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.560337 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-gncrz\" (UID: \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.561013 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-gncrz\" (UID: \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.564110 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-gncrz\" (UID: \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.576339 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jk47z\" (UniqueName: \"kubernetes.io/projected/b7b416e0-c078-47ba-97e8-f7c16294e8e5-kube-api-access-jk47z\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-gncrz\" (UID: \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz"
Feb 02 23:10:08 crc kubenswrapper[4755]: I0202 23:10:08.713293 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz"
Feb 02 23:10:09 crc kubenswrapper[4755]: I0202 23:10:09.314962 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz"]
Feb 02 23:10:09 crc kubenswrapper[4755]: W0202 23:10:09.326066 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb7b416e0_c078_47ba_97e8_f7c16294e8e5.slice/crio-25d518056521af62675bc3af068e34544190873be3de2d57958fdf51cc2a3fbc WatchSource:0}: Error finding container 25d518056521af62675bc3af068e34544190873be3de2d57958fdf51cc2a3fbc: Status 404 returned error can't find the container with id 25d518056521af62675bc3af068e34544190873be3de2d57958fdf51cc2a3fbc
Feb 02 23:10:10 crc kubenswrapper[4755]: I0202 23:10:10.179934 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz" event={"ID":"b7b416e0-c078-47ba-97e8-f7c16294e8e5","Type":"ContainerStarted","Data":"25d518056521af62675bc3af068e34544190873be3de2d57958fdf51cc2a3fbc"}
Feb 02 23:10:11 crc kubenswrapper[4755]: I0202 23:10:11.188898 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz" event={"ID":"b7b416e0-c078-47ba-97e8-f7c16294e8e5","Type":"ContainerStarted","Data":"e88692a92d5e10bf689f953b3107c8f8f802962af3d3dc807bd81b3e1fd86f51"}
Feb 02 23:10:26 crc kubenswrapper[4755]: I0202 23:10:26.324697 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz" podStartSLOduration=17.332722961 podStartE2EDuration="18.324678782s" podCreationTimestamp="2026-02-02 23:10:08 +0000 UTC" firstStartedPulling="2026-02-02 23:10:09.331111792 +0000 UTC m=+2165.022332128" lastFinishedPulling="2026-02-02 23:10:10.323067623 +0000 UTC m=+2166.014287949" observedRunningTime="2026-02-02 23:10:11.205765882 +0000 UTC m=+2166.896986208" watchObservedRunningTime="2026-02-02 23:10:26.324678782 +0000 UTC m=+2182.015899118"
Feb 02 23:10:26 crc kubenswrapper[4755]: I0202 23:10:26.338070 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cgvql"]
Feb 02 23:10:26 crc kubenswrapper[4755]: I0202 23:10:26.341296 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cgvql"
Feb 02 23:10:26 crc kubenswrapper[4755]: I0202 23:10:26.355721 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cgvql"]
Feb 02 23:10:26 crc kubenswrapper[4755]: I0202 23:10:26.514603 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkm8p\" (UniqueName: \"kubernetes.io/projected/ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4-kube-api-access-qkm8p\") pod \"certified-operators-cgvql\" (UID: \"ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4\") " pod="openshift-marketplace/certified-operators-cgvql"
Feb 02 23:10:26 crc kubenswrapper[4755]: I0202 23:10:26.514954 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4-catalog-content\") pod \"certified-operators-cgvql\" (UID: \"ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4\") " pod="openshift-marketplace/certified-operators-cgvql"
Feb 02 23:10:26 crc kubenswrapper[4755]: I0202 23:10:26.514984 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4-utilities\") pod \"certified-operators-cgvql\" (UID: \"ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4\") " pod="openshift-marketplace/certified-operators-cgvql"
Feb 02 23:10:26 crc kubenswrapper[4755]: I0202 23:10:26.618334 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkm8p\" (UniqueName: \"kubernetes.io/projected/ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4-kube-api-access-qkm8p\") pod \"certified-operators-cgvql\" (UID: \"ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4\") " pod="openshift-marketplace/certified-operators-cgvql"
Feb 02 23:10:26 crc kubenswrapper[4755]: I0202 23:10:26.618382 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4-catalog-content\") pod \"certified-operators-cgvql\" (UID: \"ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4\") " pod="openshift-marketplace/certified-operators-cgvql"
Feb 02 23:10:26 crc kubenswrapper[4755]: I0202 23:10:26.618408 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4-utilities\") pod \"certified-operators-cgvql\" (UID: \"ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4\") " pod="openshift-marketplace/certified-operators-cgvql"
Feb 02 23:10:26 crc kubenswrapper[4755]: I0202 23:10:26.618940 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4-utilities\") pod \"certified-operators-cgvql\" (UID: \"ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4\") " pod="openshift-marketplace/certified-operators-cgvql"
Feb 02 23:10:26 crc kubenswrapper[4755]: I0202 23:10:26.619081 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4-catalog-content\") pod \"certified-operators-cgvql\" (UID: \"ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4\") " pod="openshift-marketplace/certified-operators-cgvql"
Feb 02 23:10:26 crc kubenswrapper[4755]: I0202 23:10:26.659645 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkm8p\" (UniqueName: \"kubernetes.io/projected/ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4-kube-api-access-qkm8p\") pod \"certified-operators-cgvql\" (UID: \"ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4\") " pod="openshift-marketplace/certified-operators-cgvql"
Feb 02 23:10:26 crc kubenswrapper[4755]: I0202 23:10:26.699734 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cgvql"
Feb 02 23:10:27 crc kubenswrapper[4755]: W0202 23:10:27.180151 4755 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podea5dc331_ff0d_4bb8_9568_b6a94d77a0c4.slice/crio-78e311efbca8df8c778d22b2cab5b53a64c271441dada62a44280ae80cf2a586 WatchSource:0}: Error finding container 78e311efbca8df8c778d22b2cab5b53a64c271441dada62a44280ae80cf2a586: Status 404 returned error can't find the container with id 78e311efbca8df8c778d22b2cab5b53a64c271441dada62a44280ae80cf2a586
Feb 02 23:10:27 crc kubenswrapper[4755]: I0202 23:10:27.181713 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cgvql"]
Feb 02 23:10:27 crc kubenswrapper[4755]: I0202 23:10:27.390284 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cgvql" event={"ID":"ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4","Type":"ContainerStarted","Data":"567abeb3814f18d6cbb5d775d377e4996e2a11c0e52d85d99df36e674063fd23"}
Feb 02 23:10:27 crc kubenswrapper[4755]: I0202 23:10:27.390666 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cgvql" event={"ID":"ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4","Type":"ContainerStarted","Data":"78e311efbca8df8c778d22b2cab5b53a64c271441dada62a44280ae80cf2a586"}
Feb 02 23:10:28 crc kubenswrapper[4755]: I0202 23:10:28.405402 4755 generic.go:334] "Generic (PLEG): container finished" podID="ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4" containerID="567abeb3814f18d6cbb5d775d377e4996e2a11c0e52d85d99df36e674063fd23" exitCode=0
Feb 02 23:10:28 crc kubenswrapper[4755]: I0202 23:10:28.405495 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cgvql" event={"ID":"ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4","Type":"ContainerDied","Data":"567abeb3814f18d6cbb5d775d377e4996e2a11c0e52d85d99df36e674063fd23"}
Feb 02 23:10:30 crc kubenswrapper[4755]: I0202 23:10:30.431069 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cgvql" event={"ID":"ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4","Type":"ContainerStarted","Data":"f2e34265f1c16ca7ce7b51085d1e6668c3f0d9a51422191a620754266eda722a"}
Feb 02 23:10:31 crc kubenswrapper[4755]: I0202 23:10:31.445829 4755 generic.go:334] "Generic (PLEG): container finished" podID="ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4" containerID="f2e34265f1c16ca7ce7b51085d1e6668c3f0d9a51422191a620754266eda722a" exitCode=0
Feb 02 23:10:31 crc kubenswrapper[4755]: I0202 23:10:31.445879 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cgvql" event={"ID":"ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4","Type":"ContainerDied","Data":"f2e34265f1c16ca7ce7b51085d1e6668c3f0d9a51422191a620754266eda722a"}
Feb 02 23:10:32 crc kubenswrapper[4755]: I0202 23:10:32.461907 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cgvql" event={"ID":"ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4","Type":"ContainerStarted","Data":"1390bd2cad3fce55794c385657a589a687f19af0b3df894ca34b341cff8b848e"}
Feb 02 23:10:32 crc kubenswrapper[4755]: I0202 23:10:32.501818 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cgvql" podStartSLOduration=3.057710198 podStartE2EDuration="6.501714878s" podCreationTimestamp="2026-02-02 23:10:26 +0000 UTC" firstStartedPulling="2026-02-02 23:10:28.411105224 +0000 UTC m=+2184.102325580" lastFinishedPulling="2026-02-02 23:10:31.855109934 +0000 UTC m=+2187.546330260" observedRunningTime="2026-02-02 23:10:32.494284189 +0000 UTC m=+2188.185504525" watchObservedRunningTime="2026-02-02 23:10:32.501714878 +0000 UTC m=+2188.192935214"
Feb 02 23:10:36 crc kubenswrapper[4755]: I0202 23:10:36.700631 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cgvql"
Feb 02 23:10:36 crc kubenswrapper[4755]: I0202 23:10:36.701147 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cgvql"
Feb 02 23:10:36 crc kubenswrapper[4755]: I0202 23:10:36.768388 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cgvql"
Feb 02 23:10:37 crc kubenswrapper[4755]: I0202 23:10:37.562430 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cgvql"
Feb 02 23:10:37 crc kubenswrapper[4755]: I0202 23:10:37.623359 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cgvql"]
Feb 02 23:10:39 crc kubenswrapper[4755]: I0202 23:10:39.535541 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-cgvql" podUID="ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4" containerName="registry-server" containerID="cri-o://1390bd2cad3fce55794c385657a589a687f19af0b3df894ca34b341cff8b848e" gracePeriod=2
Feb 02 23:10:39 crc kubenswrapper[4755]: I0202 23:10:39.998669 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cgvql" Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.176334 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4-utilities\") pod \"ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4\" (UID: \"ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4\") " Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.176479 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkm8p\" (UniqueName: \"kubernetes.io/projected/ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4-kube-api-access-qkm8p\") pod \"ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4\" (UID: \"ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4\") " Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.176621 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4-catalog-content\") pod \"ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4\" (UID: \"ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4\") " Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.177519 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4-utilities" (OuterVolumeSpecName: "utilities") pod "ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4" (UID: "ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.183004 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4-kube-api-access-qkm8p" (OuterVolumeSpecName: "kube-api-access-qkm8p") pod "ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4" (UID: "ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4"). InnerVolumeSpecName "kube-api-access-qkm8p". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.222797 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4" (UID: "ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.279598 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.279646 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkm8p\" (UniqueName: \"kubernetes.io/projected/ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4-kube-api-access-qkm8p\") on node \"crc\" DevicePath \"\"" Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.279664 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.549872 4755 generic.go:334] "Generic (PLEG): container finished" podID="ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4" containerID="1390bd2cad3fce55794c385657a589a687f19af0b3df894ca34b341cff8b848e" exitCode=0 Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.549920 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cgvql" event={"ID":"ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4","Type":"ContainerDied","Data":"1390bd2cad3fce55794c385657a589a687f19af0b3df894ca34b341cff8b848e"} Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.549950 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cgvql" event={"ID":"ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4","Type":"ContainerDied","Data":"78e311efbca8df8c778d22b2cab5b53a64c271441dada62a44280ae80cf2a586"} Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.549968 4755 scope.go:117] "RemoveContainer" containerID="1390bd2cad3fce55794c385657a589a687f19af0b3df894ca34b341cff8b848e" Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.549925 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cgvql" Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.582420 4755 scope.go:117] "RemoveContainer" containerID="f2e34265f1c16ca7ce7b51085d1e6668c3f0d9a51422191a620754266eda722a" Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.599909 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cgvql"] Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.611990 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-cgvql"] Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.617029 4755 scope.go:117] "RemoveContainer" containerID="567abeb3814f18d6cbb5d775d377e4996e2a11c0e52d85d99df36e674063fd23" Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.685826 4755 scope.go:117] "RemoveContainer" containerID="1390bd2cad3fce55794c385657a589a687f19af0b3df894ca34b341cff8b848e" Feb 02 23:10:40 crc kubenswrapper[4755]: E0202 23:10:40.686511 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1390bd2cad3fce55794c385657a589a687f19af0b3df894ca34b341cff8b848e\": container with ID starting with 1390bd2cad3fce55794c385657a589a687f19af0b3df894ca34b341cff8b848e not found: ID does not exist" containerID="1390bd2cad3fce55794c385657a589a687f19af0b3df894ca34b341cff8b848e" Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.686581 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1390bd2cad3fce55794c385657a589a687f19af0b3df894ca34b341cff8b848e"} err="failed to get container status \"1390bd2cad3fce55794c385657a589a687f19af0b3df894ca34b341cff8b848e\": rpc error: code = NotFound desc = could not find container \"1390bd2cad3fce55794c385657a589a687f19af0b3df894ca34b341cff8b848e\": container with ID starting with 1390bd2cad3fce55794c385657a589a687f19af0b3df894ca34b341cff8b848e not found: ID does not exist" Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.686630 4755 scope.go:117] "RemoveContainer" containerID="f2e34265f1c16ca7ce7b51085d1e6668c3f0d9a51422191a620754266eda722a" Feb 02 23:10:40 crc kubenswrapper[4755]: E0202 23:10:40.689547 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2e34265f1c16ca7ce7b51085d1e6668c3f0d9a51422191a620754266eda722a\": container with ID starting with f2e34265f1c16ca7ce7b51085d1e6668c3f0d9a51422191a620754266eda722a not found: ID does not exist" containerID="f2e34265f1c16ca7ce7b51085d1e6668c3f0d9a51422191a620754266eda722a" Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.689614 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2e34265f1c16ca7ce7b51085d1e6668c3f0d9a51422191a620754266eda722a"} err="failed to get container status \"f2e34265f1c16ca7ce7b51085d1e6668c3f0d9a51422191a620754266eda722a\": rpc error: code = NotFound desc = could not find container \"f2e34265f1c16ca7ce7b51085d1e6668c3f0d9a51422191a620754266eda722a\": container with ID starting with f2e34265f1c16ca7ce7b51085d1e6668c3f0d9a51422191a620754266eda722a not found: ID does not exist" Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.689656 4755 scope.go:117] "RemoveContainer" containerID="567abeb3814f18d6cbb5d775d377e4996e2a11c0e52d85d99df36e674063fd23" Feb 02 23:10:40 crc kubenswrapper[4755]: E0202 23:10:40.690560 4755 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"567abeb3814f18d6cbb5d775d377e4996e2a11c0e52d85d99df36e674063fd23\": container with ID starting with 567abeb3814f18d6cbb5d775d377e4996e2a11c0e52d85d99df36e674063fd23 not found: ID does not exist" containerID="567abeb3814f18d6cbb5d775d377e4996e2a11c0e52d85d99df36e674063fd23" Feb 02 23:10:40 crc kubenswrapper[4755]: I0202 23:10:40.690627 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"567abeb3814f18d6cbb5d775d377e4996e2a11c0e52d85d99df36e674063fd23"} err="failed to get container status \"567abeb3814f18d6cbb5d775d377e4996e2a11c0e52d85d99df36e674063fd23\": rpc error: code = NotFound desc = could not find container \"567abeb3814f18d6cbb5d775d377e4996e2a11c0e52d85d99df36e674063fd23\": container with ID starting with 567abeb3814f18d6cbb5d775d377e4996e2a11c0e52d85d99df36e674063fd23 not found: ID does not exist" Feb 02 23:10:41 crc kubenswrapper[4755]: I0202 23:10:41.093656 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4" path="/var/lib/kubelet/pods/ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4/volumes" Feb 02 23:10:53 crc kubenswrapper[4755]: I0202 23:10:53.389343 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 23:10:53 crc kubenswrapper[4755]: I0202 23:10:53.389997 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 23:11:23 crc kubenswrapper[4755]: I0202 23:11:23.391028 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 23:11:23 crc kubenswrapper[4755]: I0202 23:11:23.392330 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 23:11:53 crc kubenswrapper[4755]: I0202 23:11:53.389068 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 23:11:53 crc kubenswrapper[4755]: I0202 23:11:53.389931 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 23:11:53 crc kubenswrapper[4755]: I0202 23:11:53.390006 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" Feb 02 23:11:53 crc kubenswrapper[4755]: I0202 23:11:53.391360 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089"} pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 23:11:53 crc kubenswrapper[4755]: I0202 23:11:53.391460 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" containerID="cri-o://79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089" gracePeriod=600 Feb 02 23:11:53 crc kubenswrapper[4755]: E0202 23:11:53.533466 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:11:53 crc kubenswrapper[4755]: I0202 23:11:53.554867 4755 generic.go:334] "Generic (PLEG): container finished" podID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089" exitCode=0 Feb 02 23:11:53 crc kubenswrapper[4755]: I0202 23:11:53.554900 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerDied","Data":"79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089"} Feb 02 23:11:53 crc kubenswrapper[4755]: I0202 23:11:53.554953 4755 scope.go:117] "RemoveContainer" containerID="30bad18ffb9e582a32f758848770c077d9c7b99e4ba3e8c0b3b77128686a49e6" Feb 02 23:11:53 crc kubenswrapper[4755]: I0202 23:11:53.556600 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089" Feb 02 23:11:53 crc kubenswrapper[4755]: E0202 23:11:53.557758 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:12:05 crc kubenswrapper[4755]: I0202 23:12:05.086203 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089" Feb 02 23:12:05 crc kubenswrapper[4755]: E0202 23:12:05.087790 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:12:18 crc 
kubenswrapper[4755]: I0202 23:12:18.069431 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089" Feb 02 23:12:18 crc kubenswrapper[4755]: E0202 23:12:18.070691 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:12:31 crc kubenswrapper[4755]: I0202 23:12:31.069973 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089" Feb 02 23:12:31 crc kubenswrapper[4755]: E0202 23:12:31.070829 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:12:44 crc kubenswrapper[4755]: I0202 23:12:44.068891 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089" Feb 02 23:12:44 crc kubenswrapper[4755]: E0202 23:12:44.069640 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:12:56 crc kubenswrapper[4755]: I0202 23:12:56.069082 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089" Feb 02 23:12:56 crc kubenswrapper[4755]: E0202 23:12:56.069932 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:13:11 crc kubenswrapper[4755]: I0202 23:13:11.069946 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089" Feb 02 23:13:11 crc kubenswrapper[4755]: E0202 23:13:11.070707 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:13:22 crc kubenswrapper[4755]: I0202 23:13:22.069319 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089" Feb 02 23:13:22 crc 
Feb 02 23:13:22 crc kubenswrapper[4755]: E0202 23:13:22.071071 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f"
Feb 02 23:13:35 crc kubenswrapper[4755]: I0202 23:13:35.083365 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089"
Feb 02 23:13:35 crc kubenswrapper[4755]: E0202 23:13:35.086436 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f"
Feb 02 23:13:48 crc kubenswrapper[4755]: I0202 23:13:48.069211 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089"
Feb 02 23:13:48 crc kubenswrapper[4755]: E0202 23:13:48.070357 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f"
Feb 02 23:14:00 crc kubenswrapper[4755]: I0202 23:14:00.069892 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089"
Feb 02 23:14:00 crc kubenswrapper[4755]: E0202 23:14:00.071171 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f"
Feb 02 23:14:08 crc kubenswrapper[4755]: I0202 23:14:08.188440 4755 generic.go:334] "Generic (PLEG): container finished" podID="b7b416e0-c078-47ba-97e8-f7c16294e8e5" containerID="e88692a92d5e10bf689f953b3107c8f8f802962af3d3dc807bd81b3e1fd86f51" exitCode=0
Feb 02 23:14:08 crc kubenswrapper[4755]: I0202 23:14:08.188509 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz" event={"ID":"b7b416e0-c078-47ba-97e8-f7c16294e8e5","Type":"ContainerDied","Data":"e88692a92d5e10bf689f953b3107c8f8f802962af3d3dc807bd81b3e1fd86f51"}
Feb 02 23:14:09 crc kubenswrapper[4755]: I0202 23:14:09.777041 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz"
Feb 02 23:14:09 crc kubenswrapper[4755]: I0202 23:14:09.902281 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jk47z\" (UniqueName: \"kubernetes.io/projected/b7b416e0-c078-47ba-97e8-f7c16294e8e5-kube-api-access-jk47z\") pod \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\" (UID: \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\") "
Feb 02 23:14:09 crc kubenswrapper[4755]: I0202 23:14:09.902374 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-ssh-key-openstack-edpm-ipam\") pod \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\" (UID: \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\") "
Feb 02 23:14:09 crc kubenswrapper[4755]: I0202 23:14:09.902451 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-inventory\") pod \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\" (UID: \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\") "
Feb 02 23:14:09 crc kubenswrapper[4755]: I0202 23:14:09.902480 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-libvirt-combined-ca-bundle\") pod \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\" (UID: \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\") "
Feb 02 23:14:09 crc kubenswrapper[4755]: I0202 23:14:09.902514 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-libvirt-secret-0\") pod \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\" (UID: \"b7b416e0-c078-47ba-97e8-f7c16294e8e5\") "
Feb 02 23:14:09 crc kubenswrapper[4755]: I0202 23:14:09.908376 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7b416e0-c078-47ba-97e8-f7c16294e8e5-kube-api-access-jk47z" (OuterVolumeSpecName: "kube-api-access-jk47z") pod "b7b416e0-c078-47ba-97e8-f7c16294e8e5" (UID: "b7b416e0-c078-47ba-97e8-f7c16294e8e5"). InnerVolumeSpecName "kube-api-access-jk47z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 23:14:09 crc kubenswrapper[4755]: I0202 23:14:09.909939 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "b7b416e0-c078-47ba-97e8-f7c16294e8e5" (UID: "b7b416e0-c078-47ba-97e8-f7c16294e8e5"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:14:09 crc kubenswrapper[4755]: I0202 23:14:09.930870 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "b7b416e0-c078-47ba-97e8-f7c16294e8e5" (UID: "b7b416e0-c078-47ba-97e8-f7c16294e8e5"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:14:09 crc kubenswrapper[4755]: I0202 23:14:09.938952 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-inventory" (OuterVolumeSpecName: "inventory") pod "b7b416e0-c078-47ba-97e8-f7c16294e8e5" (UID: "b7b416e0-c078-47ba-97e8-f7c16294e8e5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:14:09 crc kubenswrapper[4755]: I0202 23:14:09.940155 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "b7b416e0-c078-47ba-97e8-f7c16294e8e5" (UID: "b7b416e0-c078-47ba-97e8-f7c16294e8e5"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.004663 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jk47z\" (UniqueName: \"kubernetes.io/projected/b7b416e0-c078-47ba-97e8-f7c16294e8e5-kube-api-access-jk47z\") on node \"crc\" DevicePath \"\""
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.004697 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.004708 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-inventory\") on node \"crc\" DevicePath \"\""
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.004719 4755 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.004744 4755 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/b7b416e0-c078-47ba-97e8-f7c16294e8e5-libvirt-secret-0\") on node \"crc\" DevicePath \"\""
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.224371 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz" event={"ID":"b7b416e0-c078-47ba-97e8-f7c16294e8e5","Type":"ContainerDied","Data":"25d518056521af62675bc3af068e34544190873be3de2d57958fdf51cc2a3fbc"}
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.224457 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="25d518056521af62675bc3af068e34544190873be3de2d57958fdf51cc2a3fbc"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.224500 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-gncrz"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.372264 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs"]
Feb 02 23:14:10 crc kubenswrapper[4755]: E0202 23:14:10.372787 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4" containerName="extract-content"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.372808 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4" containerName="extract-content"
Feb 02 23:14:10 crc kubenswrapper[4755]: E0202 23:14:10.372825 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4" containerName="extract-utilities"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.372836 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4" containerName="extract-utilities"
Feb 02 23:14:10 crc kubenswrapper[4755]: E0202 23:14:10.372860 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4" containerName="registry-server"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.372871 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4" containerName="registry-server"
Feb 02 23:14:10 crc kubenswrapper[4755]: E0202 23:14:10.372916 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7b416e0-c078-47ba-97e8-f7c16294e8e5" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.372929 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7b416e0-c078-47ba-97e8-f7c16294e8e5" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.373244 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7b416e0-c078-47ba-97e8-f7c16294e8e5" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.373269 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea5dc331-ff0d-4bb8-9568-b6a94d77a0c4" containerName="registry-server"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.374249 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.378271 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.381880 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.382431 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.382696 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.382975 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.383228 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-n24hl"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.383593 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.436241 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs"]
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.514430 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.514493 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.514532 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.514624 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.514647 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.514677 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-ssh-key-openstack-edpm-ipam\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.514813 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.514854 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.514887 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jf89m\" (UniqueName: \"kubernetes.io/projected/57c65d38-e362-4fcb-80e6-8e1881c990f1-kube-api-access-jf89m\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.617539 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.617758 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.617834 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs"
Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.617931 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-ssh-key-openstack-edpm-ipam\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs"
"operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-ssh-key-openstack-edpm-ipam\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs" Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.618220 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs" Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.618327 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs" Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.618440 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jf89m\" (UniqueName: \"kubernetes.io/projected/57c65d38-e362-4fcb-80e6-8e1881c990f1-kube-api-access-jf89m\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs" Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.618554 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs" Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.618618 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs" Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.619409 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs" Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.624003 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-ssh-key-openstack-edpm-ipam\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs" Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.624781 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" 
(UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs" Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.625020 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs" Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.626325 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs" Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.626478 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs" Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.629172 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs" Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.630898 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs" Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.657368 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jf89m\" (UniqueName: \"kubernetes.io/projected/57c65d38-e362-4fcb-80e6-8e1881c990f1-kube-api-access-jf89m\") pod \"nova-edpm-deployment-openstack-edpm-ipam-pp6rs\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs" Feb 02 23:14:10 crc kubenswrapper[4755]: I0202 23:14:10.734989 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs" Feb 02 23:14:11 crc kubenswrapper[4755]: I0202 23:14:11.317059 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs"] Feb 02 23:14:11 crc kubenswrapper[4755]: I0202 23:14:11.320855 4755 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 23:14:12 crc kubenswrapper[4755]: I0202 23:14:12.244126 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs" event={"ID":"57c65d38-e362-4fcb-80e6-8e1881c990f1","Type":"ContainerStarted","Data":"d577e61dbe5eeca1f6d123b8871022506efb0dd87ef7a72bc385a5ce1bdb4235"} Feb 02 23:14:12 crc kubenswrapper[4755]: I0202 23:14:12.244516 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs" event={"ID":"57c65d38-e362-4fcb-80e6-8e1881c990f1","Type":"ContainerStarted","Data":"128f647211838c2e9d3326ea97b1e3e29f1778243bac5cb01596b2830fc90926"} Feb 02 23:14:12 crc kubenswrapper[4755]: I0202 23:14:12.279758 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs" podStartSLOduration=1.701288662 podStartE2EDuration="2.279720924s" podCreationTimestamp="2026-02-02 23:14:10 +0000 UTC" firstStartedPulling="2026-02-02 23:14:11.320664374 +0000 UTC m=+2407.011884700" lastFinishedPulling="2026-02-02 23:14:11.899096636 +0000 UTC m=+2407.590316962" observedRunningTime="2026-02-02 23:14:12.271429981 +0000 UTC m=+2407.962650347" watchObservedRunningTime="2026-02-02 23:14:12.279720924 +0000 UTC m=+2407.970941260" Feb 02 23:14:15 crc kubenswrapper[4755]: I0202 23:14:15.075222 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089" Feb 02 23:14:15 crc kubenswrapper[4755]: E0202 23:14:15.075986 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:14:29 crc kubenswrapper[4755]: I0202 23:14:29.070178 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089" Feb 02 23:14:29 crc kubenswrapper[4755]: E0202 23:14:29.072236 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:14:42 crc kubenswrapper[4755]: I0202 23:14:42.069247 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089" Feb 02 23:14:42 crc kubenswrapper[4755]: E0202 23:14:42.069955 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:14:53 crc kubenswrapper[4755]: I0202 23:14:53.069519 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089" Feb 02 23:14:53 crc kubenswrapper[4755]: E0202 23:14:53.070576 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:14:53 crc kubenswrapper[4755]: I0202 23:14:53.218497 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2nv66"] Feb 02 23:14:53 crc kubenswrapper[4755]: I0202 23:14:53.221083 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2nv66" Feb 02 23:14:53 crc kubenswrapper[4755]: I0202 23:14:53.291202 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2nv66"] Feb 02 23:14:53 crc kubenswrapper[4755]: I0202 23:14:53.394211 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cf8k2\" (UniqueName: \"kubernetes.io/projected/b148b012-e8a3-4215-a5b0-bc7572a37ead-kube-api-access-cf8k2\") pod \"community-operators-2nv66\" (UID: \"b148b012-e8a3-4215-a5b0-bc7572a37ead\") " pod="openshift-marketplace/community-operators-2nv66" Feb 02 23:14:53 crc kubenswrapper[4755]: I0202 23:14:53.394708 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b148b012-e8a3-4215-a5b0-bc7572a37ead-catalog-content\") pod \"community-operators-2nv66\" (UID: \"b148b012-e8a3-4215-a5b0-bc7572a37ead\") " pod="openshift-marketplace/community-operators-2nv66" Feb 02 23:14:53 crc kubenswrapper[4755]: I0202 23:14:53.395060 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b148b012-e8a3-4215-a5b0-bc7572a37ead-utilities\") pod \"community-operators-2nv66\" (UID: \"b148b012-e8a3-4215-a5b0-bc7572a37ead\") " pod="openshift-marketplace/community-operators-2nv66" Feb 02 23:14:53 crc kubenswrapper[4755]: I0202 23:14:53.497266 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b148b012-e8a3-4215-a5b0-bc7572a37ead-utilities\") pod \"community-operators-2nv66\" (UID: \"b148b012-e8a3-4215-a5b0-bc7572a37ead\") " pod="openshift-marketplace/community-operators-2nv66" Feb 02 23:14:53 crc kubenswrapper[4755]: I0202 23:14:53.497352 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cf8k2\" (UniqueName: \"kubernetes.io/projected/b148b012-e8a3-4215-a5b0-bc7572a37ead-kube-api-access-cf8k2\") pod \"community-operators-2nv66\" (UID: \"b148b012-e8a3-4215-a5b0-bc7572a37ead\") " pod="openshift-marketplace/community-operators-2nv66" Feb 02 23:14:53 crc kubenswrapper[4755]: I0202 23:14:53.497625 4755 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b148b012-e8a3-4215-a5b0-bc7572a37ead-catalog-content\") pod \"community-operators-2nv66\" (UID: \"b148b012-e8a3-4215-a5b0-bc7572a37ead\") " pod="openshift-marketplace/community-operators-2nv66" Feb 02 23:14:53 crc kubenswrapper[4755]: I0202 23:14:53.497858 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b148b012-e8a3-4215-a5b0-bc7572a37ead-utilities\") pod \"community-operators-2nv66\" (UID: \"b148b012-e8a3-4215-a5b0-bc7572a37ead\") " pod="openshift-marketplace/community-operators-2nv66" Feb 02 23:14:53 crc kubenswrapper[4755]: I0202 23:14:53.498202 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b148b012-e8a3-4215-a5b0-bc7572a37ead-catalog-content\") pod \"community-operators-2nv66\" (UID: \"b148b012-e8a3-4215-a5b0-bc7572a37ead\") " pod="openshift-marketplace/community-operators-2nv66" Feb 02 23:14:53 crc kubenswrapper[4755]: I0202 23:14:53.529613 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cf8k2\" (UniqueName: \"kubernetes.io/projected/b148b012-e8a3-4215-a5b0-bc7572a37ead-kube-api-access-cf8k2\") pod \"community-operators-2nv66\" (UID: \"b148b012-e8a3-4215-a5b0-bc7572a37ead\") " pod="openshift-marketplace/community-operators-2nv66" Feb 02 23:14:53 crc kubenswrapper[4755]: I0202 23:14:53.591770 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2nv66" Feb 02 23:14:54 crc kubenswrapper[4755]: I0202 23:14:54.164634 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2nv66"] Feb 02 23:14:54 crc kubenswrapper[4755]: I0202 23:14:54.756798 4755 generic.go:334] "Generic (PLEG): container finished" podID="b148b012-e8a3-4215-a5b0-bc7572a37ead" containerID="e57cde810bf97bfe095b322857741a8acdf5e768ff30fd07d35e9ca1cb32610a" exitCode=0 Feb 02 23:14:54 crc kubenswrapper[4755]: I0202 23:14:54.756909 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2nv66" event={"ID":"b148b012-e8a3-4215-a5b0-bc7572a37ead","Type":"ContainerDied","Data":"e57cde810bf97bfe095b322857741a8acdf5e768ff30fd07d35e9ca1cb32610a"} Feb 02 23:14:54 crc kubenswrapper[4755]: I0202 23:14:54.757078 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2nv66" event={"ID":"b148b012-e8a3-4215-a5b0-bc7572a37ead","Type":"ContainerStarted","Data":"276bb653de3939f7a17d971a4c5f3f7a23cb670f95def1ab2dcebbce82a2a53d"} Feb 02 23:14:55 crc kubenswrapper[4755]: I0202 23:14:55.805404 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2nv66" event={"ID":"b148b012-e8a3-4215-a5b0-bc7572a37ead","Type":"ContainerStarted","Data":"45c9823ff9c5931d42859e25cee5449aac6c082fd3b53772c1a14ee12576ed19"} Feb 02 23:14:56 crc kubenswrapper[4755]: I0202 23:14:56.817326 4755 generic.go:334] "Generic (PLEG): container finished" podID="b148b012-e8a3-4215-a5b0-bc7572a37ead" containerID="45c9823ff9c5931d42859e25cee5449aac6c082fd3b53772c1a14ee12576ed19" exitCode=0 Feb 02 23:14:56 crc kubenswrapper[4755]: I0202 23:14:56.817384 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2nv66" 
event={"ID":"b148b012-e8a3-4215-a5b0-bc7572a37ead","Type":"ContainerDied","Data":"45c9823ff9c5931d42859e25cee5449aac6c082fd3b53772c1a14ee12576ed19"} Feb 02 23:14:57 crc kubenswrapper[4755]: I0202 23:14:57.829843 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2nv66" event={"ID":"b148b012-e8a3-4215-a5b0-bc7572a37ead","Type":"ContainerStarted","Data":"3288e47617eb64730d5615ed9ffed982498b0d2f1a6b978fc8d7f58568908de0"} Feb 02 23:14:57 crc kubenswrapper[4755]: I0202 23:14:57.859976 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2nv66" podStartSLOduration=2.140461294 podStartE2EDuration="4.859951185s" podCreationTimestamp="2026-02-02 23:14:53 +0000 UTC" firstStartedPulling="2026-02-02 23:14:54.75926532 +0000 UTC m=+2450.450485686" lastFinishedPulling="2026-02-02 23:14:57.478755221 +0000 UTC m=+2453.169975577" observedRunningTime="2026-02-02 23:14:57.855285934 +0000 UTC m=+2453.546506300" watchObservedRunningTime="2026-02-02 23:14:57.859951185 +0000 UTC m=+2453.551171521" Feb 02 23:15:00 crc kubenswrapper[4755]: I0202 23:15:00.160973 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501235-d87sk"] Feb 02 23:15:00 crc kubenswrapper[4755]: I0202 23:15:00.163461 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501235-d87sk" Feb 02 23:15:00 crc kubenswrapper[4755]: I0202 23:15:00.166393 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 02 23:15:00 crc kubenswrapper[4755]: I0202 23:15:00.167867 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 02 23:15:00 crc kubenswrapper[4755]: I0202 23:15:00.186561 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501235-d87sk"] Feb 02 23:15:00 crc kubenswrapper[4755]: I0202 23:15:00.290234 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7lfd\" (UniqueName: \"kubernetes.io/projected/85699574-9b4c-4adc-afd6-412e21a9bae3-kube-api-access-w7lfd\") pod \"collect-profiles-29501235-d87sk\" (UID: \"85699574-9b4c-4adc-afd6-412e21a9bae3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501235-d87sk" Feb 02 23:15:00 crc kubenswrapper[4755]: I0202 23:15:00.290499 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/85699574-9b4c-4adc-afd6-412e21a9bae3-secret-volume\") pod \"collect-profiles-29501235-d87sk\" (UID: \"85699574-9b4c-4adc-afd6-412e21a9bae3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501235-d87sk" Feb 02 23:15:00 crc kubenswrapper[4755]: I0202 23:15:00.290575 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/85699574-9b4c-4adc-afd6-412e21a9bae3-config-volume\") pod \"collect-profiles-29501235-d87sk\" (UID: \"85699574-9b4c-4adc-afd6-412e21a9bae3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501235-d87sk" Feb 02 23:15:00 crc kubenswrapper[4755]: I0202 23:15:00.392371 4755 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-w7lfd\" (UniqueName: \"kubernetes.io/projected/85699574-9b4c-4adc-afd6-412e21a9bae3-kube-api-access-w7lfd\") pod \"collect-profiles-29501235-d87sk\" (UID: \"85699574-9b4c-4adc-afd6-412e21a9bae3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501235-d87sk" Feb 02 23:15:00 crc kubenswrapper[4755]: I0202 23:15:00.392512 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/85699574-9b4c-4adc-afd6-412e21a9bae3-secret-volume\") pod \"collect-profiles-29501235-d87sk\" (UID: \"85699574-9b4c-4adc-afd6-412e21a9bae3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501235-d87sk" Feb 02 23:15:00 crc kubenswrapper[4755]: I0202 23:15:00.392556 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/85699574-9b4c-4adc-afd6-412e21a9bae3-config-volume\") pod \"collect-profiles-29501235-d87sk\" (UID: \"85699574-9b4c-4adc-afd6-412e21a9bae3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501235-d87sk" Feb 02 23:15:00 crc kubenswrapper[4755]: I0202 23:15:00.393466 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/85699574-9b4c-4adc-afd6-412e21a9bae3-config-volume\") pod \"collect-profiles-29501235-d87sk\" (UID: \"85699574-9b4c-4adc-afd6-412e21a9bae3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501235-d87sk" Feb 02 23:15:00 crc kubenswrapper[4755]: I0202 23:15:00.397709 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/85699574-9b4c-4adc-afd6-412e21a9bae3-secret-volume\") pod \"collect-profiles-29501235-d87sk\" (UID: \"85699574-9b4c-4adc-afd6-412e21a9bae3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501235-d87sk" Feb 02 23:15:00 crc kubenswrapper[4755]: I0202 23:15:00.407667 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7lfd\" (UniqueName: \"kubernetes.io/projected/85699574-9b4c-4adc-afd6-412e21a9bae3-kube-api-access-w7lfd\") pod \"collect-profiles-29501235-d87sk\" (UID: \"85699574-9b4c-4adc-afd6-412e21a9bae3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501235-d87sk" Feb 02 23:15:00 crc kubenswrapper[4755]: I0202 23:15:00.498310 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501235-d87sk" Feb 02 23:15:01 crc kubenswrapper[4755]: I0202 23:15:01.016076 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501235-d87sk"] Feb 02 23:15:01 crc kubenswrapper[4755]: I0202 23:15:01.874960 4755 generic.go:334] "Generic (PLEG): container finished" podID="85699574-9b4c-4adc-afd6-412e21a9bae3" containerID="a54b103315b0f2e890b6b4207c20ac505e3919f066071d86a51948985f20765b" exitCode=0 Feb 02 23:15:01 crc kubenswrapper[4755]: I0202 23:15:01.875023 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501235-d87sk" event={"ID":"85699574-9b4c-4adc-afd6-412e21a9bae3","Type":"ContainerDied","Data":"a54b103315b0f2e890b6b4207c20ac505e3919f066071d86a51948985f20765b"} Feb 02 23:15:01 crc kubenswrapper[4755]: I0202 23:15:01.875241 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501235-d87sk" event={"ID":"85699574-9b4c-4adc-afd6-412e21a9bae3","Type":"ContainerStarted","Data":"3ac530452323ce5570c538fada4c758e3b7d8580b91ef9e3883d4570e47e6859"} Feb 02 23:15:03 crc kubenswrapper[4755]: I0202 23:15:03.341365 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501235-d87sk" Feb 02 23:15:03 crc kubenswrapper[4755]: I0202 23:15:03.472934 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7lfd\" (UniqueName: \"kubernetes.io/projected/85699574-9b4c-4adc-afd6-412e21a9bae3-kube-api-access-w7lfd\") pod \"85699574-9b4c-4adc-afd6-412e21a9bae3\" (UID: \"85699574-9b4c-4adc-afd6-412e21a9bae3\") " Feb 02 23:15:03 crc kubenswrapper[4755]: I0202 23:15:03.473032 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/85699574-9b4c-4adc-afd6-412e21a9bae3-config-volume\") pod \"85699574-9b4c-4adc-afd6-412e21a9bae3\" (UID: \"85699574-9b4c-4adc-afd6-412e21a9bae3\") " Feb 02 23:15:03 crc kubenswrapper[4755]: I0202 23:15:03.473071 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/85699574-9b4c-4adc-afd6-412e21a9bae3-secret-volume\") pod \"85699574-9b4c-4adc-afd6-412e21a9bae3\" (UID: \"85699574-9b4c-4adc-afd6-412e21a9bae3\") " Feb 02 23:15:03 crc kubenswrapper[4755]: I0202 23:15:03.473790 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85699574-9b4c-4adc-afd6-412e21a9bae3-config-volume" (OuterVolumeSpecName: "config-volume") pod "85699574-9b4c-4adc-afd6-412e21a9bae3" (UID: "85699574-9b4c-4adc-afd6-412e21a9bae3"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 23:15:03 crc kubenswrapper[4755]: I0202 23:15:03.479356 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85699574-9b4c-4adc-afd6-412e21a9bae3-kube-api-access-w7lfd" (OuterVolumeSpecName: "kube-api-access-w7lfd") pod "85699574-9b4c-4adc-afd6-412e21a9bae3" (UID: "85699574-9b4c-4adc-afd6-412e21a9bae3"). InnerVolumeSpecName "kube-api-access-w7lfd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:15:03 crc kubenswrapper[4755]: I0202 23:15:03.479868 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85699574-9b4c-4adc-afd6-412e21a9bae3-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "85699574-9b4c-4adc-afd6-412e21a9bae3" (UID: "85699574-9b4c-4adc-afd6-412e21a9bae3"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:15:03 crc kubenswrapper[4755]: I0202 23:15:03.574943 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7lfd\" (UniqueName: \"kubernetes.io/projected/85699574-9b4c-4adc-afd6-412e21a9bae3-kube-api-access-w7lfd\") on node \"crc\" DevicePath \"\"" Feb 02 23:15:03 crc kubenswrapper[4755]: I0202 23:15:03.574974 4755 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/85699574-9b4c-4adc-afd6-412e21a9bae3-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 23:15:03 crc kubenswrapper[4755]: I0202 23:15:03.574984 4755 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/85699574-9b4c-4adc-afd6-412e21a9bae3-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 02 23:15:03 crc kubenswrapper[4755]: I0202 23:15:03.592899 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2nv66" Feb 02 23:15:03 crc kubenswrapper[4755]: I0202 23:15:03.592973 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2nv66" Feb 02 23:15:03 crc kubenswrapper[4755]: I0202 23:15:03.656333 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2nv66" Feb 02 23:15:03 crc kubenswrapper[4755]: I0202 23:15:03.898063 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501235-d87sk" Feb 02 23:15:03 crc kubenswrapper[4755]: I0202 23:15:03.898051 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501235-d87sk" event={"ID":"85699574-9b4c-4adc-afd6-412e21a9bae3","Type":"ContainerDied","Data":"3ac530452323ce5570c538fada4c758e3b7d8580b91ef9e3883d4570e47e6859"} Feb 02 23:15:03 crc kubenswrapper[4755]: I0202 23:15:03.898108 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ac530452323ce5570c538fada4c758e3b7d8580b91ef9e3883d4570e47e6859" Feb 02 23:15:03 crc kubenswrapper[4755]: I0202 23:15:03.951386 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2nv66" Feb 02 23:15:04 crc kubenswrapper[4755]: I0202 23:15:04.412818 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501190-785d5"] Feb 02 23:15:04 crc kubenswrapper[4755]: I0202 23:15:04.420736 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501190-785d5"] Feb 02 23:15:05 crc kubenswrapper[4755]: I0202 23:15:05.095412 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d480358-faea-430f-97ad-c49f7878007b" path="/var/lib/kubelet/pods/2d480358-faea-430f-97ad-c49f7878007b/volumes" Feb 02 23:15:06 crc kubenswrapper[4755]: I0202 23:15:06.087289 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2nv66"] Feb 02 23:15:06 crc kubenswrapper[4755]: I0202 23:15:06.087610 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2nv66" podUID="b148b012-e8a3-4215-a5b0-bc7572a37ead" containerName="registry-server" containerID="cri-o://3288e47617eb64730d5615ed9ffed982498b0d2f1a6b978fc8d7f58568908de0" gracePeriod=2 Feb 02 23:15:06 crc kubenswrapper[4755]: I0202 23:15:06.647299 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2nv66" Feb 02 23:15:06 crc kubenswrapper[4755]: I0202 23:15:06.760164 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cf8k2\" (UniqueName: \"kubernetes.io/projected/b148b012-e8a3-4215-a5b0-bc7572a37ead-kube-api-access-cf8k2\") pod \"b148b012-e8a3-4215-a5b0-bc7572a37ead\" (UID: \"b148b012-e8a3-4215-a5b0-bc7572a37ead\") " Feb 02 23:15:06 crc kubenswrapper[4755]: I0202 23:15:06.760304 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b148b012-e8a3-4215-a5b0-bc7572a37ead-catalog-content\") pod \"b148b012-e8a3-4215-a5b0-bc7572a37ead\" (UID: \"b148b012-e8a3-4215-a5b0-bc7572a37ead\") " Feb 02 23:15:06 crc kubenswrapper[4755]: I0202 23:15:06.760460 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b148b012-e8a3-4215-a5b0-bc7572a37ead-utilities\") pod \"b148b012-e8a3-4215-a5b0-bc7572a37ead\" (UID: \"b148b012-e8a3-4215-a5b0-bc7572a37ead\") " Feb 02 23:15:06 crc kubenswrapper[4755]: I0202 23:15:06.761156 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b148b012-e8a3-4215-a5b0-bc7572a37ead-utilities" (OuterVolumeSpecName: "utilities") pod "b148b012-e8a3-4215-a5b0-bc7572a37ead" (UID: "b148b012-e8a3-4215-a5b0-bc7572a37ead"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 23:15:06 crc kubenswrapper[4755]: I0202 23:15:06.762233 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b148b012-e8a3-4215-a5b0-bc7572a37ead-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 23:15:06 crc kubenswrapper[4755]: I0202 23:15:06.768174 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b148b012-e8a3-4215-a5b0-bc7572a37ead-kube-api-access-cf8k2" (OuterVolumeSpecName: "kube-api-access-cf8k2") pod "b148b012-e8a3-4215-a5b0-bc7572a37ead" (UID: "b148b012-e8a3-4215-a5b0-bc7572a37ead"). InnerVolumeSpecName "kube-api-access-cf8k2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:15:06 crc kubenswrapper[4755]: I0202 23:15:06.836907 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b148b012-e8a3-4215-a5b0-bc7572a37ead-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b148b012-e8a3-4215-a5b0-bc7572a37ead" (UID: "b148b012-e8a3-4215-a5b0-bc7572a37ead"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 23:15:06 crc kubenswrapper[4755]: I0202 23:15:06.864404 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cf8k2\" (UniqueName: \"kubernetes.io/projected/b148b012-e8a3-4215-a5b0-bc7572a37ead-kube-api-access-cf8k2\") on node \"crc\" DevicePath \"\"" Feb 02 23:15:06 crc kubenswrapper[4755]: I0202 23:15:06.864437 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b148b012-e8a3-4215-a5b0-bc7572a37ead-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 23:15:06 crc kubenswrapper[4755]: I0202 23:15:06.933645 4755 generic.go:334] "Generic (PLEG): container finished" podID="b148b012-e8a3-4215-a5b0-bc7572a37ead" containerID="3288e47617eb64730d5615ed9ffed982498b0d2f1a6b978fc8d7f58568908de0" exitCode=0 Feb 02 23:15:06 crc kubenswrapper[4755]: I0202 23:15:06.933695 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2nv66" event={"ID":"b148b012-e8a3-4215-a5b0-bc7572a37ead","Type":"ContainerDied","Data":"3288e47617eb64730d5615ed9ffed982498b0d2f1a6b978fc8d7f58568908de0"} Feb 02 23:15:06 crc kubenswrapper[4755]: I0202 23:15:06.933740 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2nv66" event={"ID":"b148b012-e8a3-4215-a5b0-bc7572a37ead","Type":"ContainerDied","Data":"276bb653de3939f7a17d971a4c5f3f7a23cb670f95def1ab2dcebbce82a2a53d"} Feb 02 23:15:06 crc kubenswrapper[4755]: I0202 23:15:06.933786 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2nv66" Feb 02 23:15:06 crc kubenswrapper[4755]: I0202 23:15:06.933791 4755 scope.go:117] "RemoveContainer" containerID="3288e47617eb64730d5615ed9ffed982498b0d2f1a6b978fc8d7f58568908de0" Feb 02 23:15:06 crc kubenswrapper[4755]: I0202 23:15:06.966294 4755 scope.go:117] "RemoveContainer" containerID="45c9823ff9c5931d42859e25cee5449aac6c082fd3b53772c1a14ee12576ed19" Feb 02 23:15:06 crc kubenswrapper[4755]: I0202 23:15:06.980880 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2nv66"] Feb 02 23:15:06 crc kubenswrapper[4755]: I0202 23:15:06.989088 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2nv66"] Feb 02 23:15:06 crc kubenswrapper[4755]: I0202 23:15:06.999012 4755 scope.go:117] "RemoveContainer" containerID="e57cde810bf97bfe095b322857741a8acdf5e768ff30fd07d35e9ca1cb32610a" Feb 02 23:15:07 crc kubenswrapper[4755]: I0202 23:15:07.067030 4755 scope.go:117] "RemoveContainer" containerID="3288e47617eb64730d5615ed9ffed982498b0d2f1a6b978fc8d7f58568908de0" Feb 02 23:15:07 crc kubenswrapper[4755]: E0202 23:15:07.067925 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3288e47617eb64730d5615ed9ffed982498b0d2f1a6b978fc8d7f58568908de0\": container with ID starting with 3288e47617eb64730d5615ed9ffed982498b0d2f1a6b978fc8d7f58568908de0 not found: ID does not exist" containerID="3288e47617eb64730d5615ed9ffed982498b0d2f1a6b978fc8d7f58568908de0" Feb 02 23:15:07 crc kubenswrapper[4755]: I0202 23:15:07.067986 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3288e47617eb64730d5615ed9ffed982498b0d2f1a6b978fc8d7f58568908de0"} err="failed to get container status 
\"3288e47617eb64730d5615ed9ffed982498b0d2f1a6b978fc8d7f58568908de0\": rpc error: code = NotFound desc = could not find container \"3288e47617eb64730d5615ed9ffed982498b0d2f1a6b978fc8d7f58568908de0\": container with ID starting with 3288e47617eb64730d5615ed9ffed982498b0d2f1a6b978fc8d7f58568908de0 not found: ID does not exist" Feb 02 23:15:07 crc kubenswrapper[4755]: I0202 23:15:07.068044 4755 scope.go:117] "RemoveContainer" containerID="45c9823ff9c5931d42859e25cee5449aac6c082fd3b53772c1a14ee12576ed19" Feb 02 23:15:07 crc kubenswrapper[4755]: E0202 23:15:07.068666 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45c9823ff9c5931d42859e25cee5449aac6c082fd3b53772c1a14ee12576ed19\": container with ID starting with 45c9823ff9c5931d42859e25cee5449aac6c082fd3b53772c1a14ee12576ed19 not found: ID does not exist" containerID="45c9823ff9c5931d42859e25cee5449aac6c082fd3b53772c1a14ee12576ed19" Feb 02 23:15:07 crc kubenswrapper[4755]: I0202 23:15:07.068712 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45c9823ff9c5931d42859e25cee5449aac6c082fd3b53772c1a14ee12576ed19"} err="failed to get container status \"45c9823ff9c5931d42859e25cee5449aac6c082fd3b53772c1a14ee12576ed19\": rpc error: code = NotFound desc = could not find container \"45c9823ff9c5931d42859e25cee5449aac6c082fd3b53772c1a14ee12576ed19\": container with ID starting with 45c9823ff9c5931d42859e25cee5449aac6c082fd3b53772c1a14ee12576ed19 not found: ID does not exist" Feb 02 23:15:07 crc kubenswrapper[4755]: I0202 23:15:07.068759 4755 scope.go:117] "RemoveContainer" containerID="e57cde810bf97bfe095b322857741a8acdf5e768ff30fd07d35e9ca1cb32610a" Feb 02 23:15:07 crc kubenswrapper[4755]: E0202 23:15:07.069177 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e57cde810bf97bfe095b322857741a8acdf5e768ff30fd07d35e9ca1cb32610a\": container with ID starting with e57cde810bf97bfe095b322857741a8acdf5e768ff30fd07d35e9ca1cb32610a not found: ID does not exist" containerID="e57cde810bf97bfe095b322857741a8acdf5e768ff30fd07d35e9ca1cb32610a" Feb 02 23:15:07 crc kubenswrapper[4755]: I0202 23:15:07.069231 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e57cde810bf97bfe095b322857741a8acdf5e768ff30fd07d35e9ca1cb32610a"} err="failed to get container status \"e57cde810bf97bfe095b322857741a8acdf5e768ff30fd07d35e9ca1cb32610a\": rpc error: code = NotFound desc = could not find container \"e57cde810bf97bfe095b322857741a8acdf5e768ff30fd07d35e9ca1cb32610a\": container with ID starting with e57cde810bf97bfe095b322857741a8acdf5e768ff30fd07d35e9ca1cb32610a not found: ID does not exist" Feb 02 23:15:07 crc kubenswrapper[4755]: I0202 23:15:07.092495 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b148b012-e8a3-4215-a5b0-bc7572a37ead" path="/var/lib/kubelet/pods/b148b012-e8a3-4215-a5b0-bc7572a37ead/volumes" Feb 02 23:15:08 crc kubenswrapper[4755]: I0202 23:15:08.070213 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089" Feb 02 23:15:08 crc kubenswrapper[4755]: E0202 23:15:08.070701 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:15:11 crc kubenswrapper[4755]: I0202 23:15:11.883819 4755 scope.go:117] "RemoveContainer" containerID="7ccbfeee59ba0ed97fbce6a9cebca08514e7e81f082d93ca1693781c8081d75c" Feb 02 23:15:22 crc kubenswrapper[4755]: I0202 23:15:22.070864 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089" Feb 02 23:15:22 crc kubenswrapper[4755]: E0202 23:15:22.071842 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:15:33 crc kubenswrapper[4755]: I0202 23:15:33.070033 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089" Feb 02 23:15:33 crc kubenswrapper[4755]: E0202 23:15:33.071308 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:15:48 crc kubenswrapper[4755]: I0202 23:15:48.069899 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089" Feb 02 23:15:48 crc kubenswrapper[4755]: E0202 23:15:48.071178 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:16:01 crc kubenswrapper[4755]: I0202 23:16:01.069844 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089" Feb 02 23:16:01 crc kubenswrapper[4755]: E0202 23:16:01.070920 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:16:17 crc kubenswrapper[4755]: I0202 23:16:17.068855 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089" Feb 02 23:16:17 crc kubenswrapper[4755]: E0202 23:16:17.069673 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:16:30 crc kubenswrapper[4755]: I0202 23:16:30.268290 4755 generic.go:334] "Generic (PLEG): container finished" podID="57c65d38-e362-4fcb-80e6-8e1881c990f1" containerID="d577e61dbe5eeca1f6d123b8871022506efb0dd87ef7a72bc385a5ce1bdb4235" exitCode=0 Feb 02 23:16:30 crc kubenswrapper[4755]: I0202 23:16:30.269137 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs" event={"ID":"57c65d38-e362-4fcb-80e6-8e1881c990f1","Type":"ContainerDied","Data":"d577e61dbe5eeca1f6d123b8871022506efb0dd87ef7a72bc385a5ce1bdb4235"} Feb 02 23:16:31 crc kubenswrapper[4755]: I0202 23:16:31.068976 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089" Feb 02 23:16:31 crc kubenswrapper[4755]: E0202 23:16:31.069610 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:16:31 crc kubenswrapper[4755]: I0202 23:16:31.742889 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs" Feb 02 23:16:31 crc kubenswrapper[4755]: I0202 23:16:31.929518 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-cell1-compute-config-0\") pod \"57c65d38-e362-4fcb-80e6-8e1881c990f1\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " Feb 02 23:16:31 crc kubenswrapper[4755]: I0202 23:16:31.929565 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-ssh-key-openstack-edpm-ipam\") pod \"57c65d38-e362-4fcb-80e6-8e1881c990f1\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " Feb 02 23:16:31 crc kubenswrapper[4755]: I0202 23:16:31.929717 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-migration-ssh-key-0\") pod \"57c65d38-e362-4fcb-80e6-8e1881c990f1\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " Feb 02 23:16:31 crc kubenswrapper[4755]: I0202 23:16:31.929769 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-combined-ca-bundle\") pod \"57c65d38-e362-4fcb-80e6-8e1881c990f1\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " Feb 02 23:16:31 crc kubenswrapper[4755]: I0202 23:16:31.929884 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-migration-ssh-key-1\") pod \"57c65d38-e362-4fcb-80e6-8e1881c990f1\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") " 
Feb 02 23:16:31 crc kubenswrapper[4755]: I0202 23:16:31.929928 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-inventory\") pod \"57c65d38-e362-4fcb-80e6-8e1881c990f1\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") "
Feb 02 23:16:31 crc kubenswrapper[4755]: I0202 23:16:31.929962 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-extra-config-0\") pod \"57c65d38-e362-4fcb-80e6-8e1881c990f1\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") "
Feb 02 23:16:31 crc kubenswrapper[4755]: I0202 23:16:31.930430 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jf89m\" (UniqueName: \"kubernetes.io/projected/57c65d38-e362-4fcb-80e6-8e1881c990f1-kube-api-access-jf89m\") pod \"57c65d38-e362-4fcb-80e6-8e1881c990f1\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") "
Feb 02 23:16:31 crc kubenswrapper[4755]: I0202 23:16:31.930484 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-cell1-compute-config-1\") pod \"57c65d38-e362-4fcb-80e6-8e1881c990f1\" (UID: \"57c65d38-e362-4fcb-80e6-8e1881c990f1\") "
Feb 02 23:16:31 crc kubenswrapper[4755]: I0202 23:16:31.935671 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57c65d38-e362-4fcb-80e6-8e1881c990f1-kube-api-access-jf89m" (OuterVolumeSpecName: "kube-api-access-jf89m") pod "57c65d38-e362-4fcb-80e6-8e1881c990f1" (UID: "57c65d38-e362-4fcb-80e6-8e1881c990f1"). InnerVolumeSpecName "kube-api-access-jf89m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 23:16:31 crc kubenswrapper[4755]: I0202 23:16:31.938420 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "57c65d38-e362-4fcb-80e6-8e1881c990f1" (UID: "57c65d38-e362-4fcb-80e6-8e1881c990f1"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:16:31 crc kubenswrapper[4755]: I0202 23:16:31.959064 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "57c65d38-e362-4fcb-80e6-8e1881c990f1" (UID: "57c65d38-e362-4fcb-80e6-8e1881c990f1"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 23:16:31 crc kubenswrapper[4755]: I0202 23:16:31.966113 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "57c65d38-e362-4fcb-80e6-8e1881c990f1" (UID: "57c65d38-e362-4fcb-80e6-8e1881c990f1"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:16:31 crc kubenswrapper[4755]: I0202 23:16:31.971128 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "57c65d38-e362-4fcb-80e6-8e1881c990f1" (UID: "57c65d38-e362-4fcb-80e6-8e1881c990f1"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:16:31 crc kubenswrapper[4755]: I0202 23:16:31.971223 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "57c65d38-e362-4fcb-80e6-8e1881c990f1" (UID: "57c65d38-e362-4fcb-80e6-8e1881c990f1"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:16:31 crc kubenswrapper[4755]: I0202 23:16:31.971478 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-inventory" (OuterVolumeSpecName: "inventory") pod "57c65d38-e362-4fcb-80e6-8e1881c990f1" (UID: "57c65d38-e362-4fcb-80e6-8e1881c990f1"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:16:31 crc kubenswrapper[4755]: I0202 23:16:31.983695 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "57c65d38-e362-4fcb-80e6-8e1881c990f1" (UID: "57c65d38-e362-4fcb-80e6-8e1881c990f1"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.009028 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "57c65d38-e362-4fcb-80e6-8e1881c990f1" (UID: "57c65d38-e362-4fcb-80e6-8e1881c990f1"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.034168 4755 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\""
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.034209 4755 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\""
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.034225 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.034236 4755 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\""
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.034250 4755 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.034260 4755 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\""
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.034272 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/57c65d38-e362-4fcb-80e6-8e1881c990f1-inventory\") on node \"crc\" DevicePath \"\""
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.034283 4755 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/57c65d38-e362-4fcb-80e6-8e1881c990f1-nova-extra-config-0\") on node \"crc\" DevicePath \"\""
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.034292 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jf89m\" (UniqueName: \"kubernetes.io/projected/57c65d38-e362-4fcb-80e6-8e1881c990f1-kube-api-access-jf89m\") on node \"crc\" DevicePath \"\""
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.292274 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs" event={"ID":"57c65d38-e362-4fcb-80e6-8e1881c990f1","Type":"ContainerDied","Data":"128f647211838c2e9d3326ea97b1e3e29f1778243bac5cb01596b2830fc90926"}
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.292713 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="128f647211838c2e9d3326ea97b1e3e29f1778243bac5cb01596b2830fc90926"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.292674 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-pp6rs"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.404862 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"]
Feb 02 23:16:32 crc kubenswrapper[4755]: E0202 23:16:32.405341 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b148b012-e8a3-4215-a5b0-bc7572a37ead" containerName="registry-server"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.405361 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b148b012-e8a3-4215-a5b0-bc7572a37ead" containerName="registry-server"
Feb 02 23:16:32 crc kubenswrapper[4755]: E0202 23:16:32.405380 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85699574-9b4c-4adc-afd6-412e21a9bae3" containerName="collect-profiles"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.405387 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="85699574-9b4c-4adc-afd6-412e21a9bae3" containerName="collect-profiles"
Feb 02 23:16:32 crc kubenswrapper[4755]: E0202 23:16:32.405405 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57c65d38-e362-4fcb-80e6-8e1881c990f1" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.405411 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="57c65d38-e362-4fcb-80e6-8e1881c990f1" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Feb 02 23:16:32 crc kubenswrapper[4755]: E0202 23:16:32.405431 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b148b012-e8a3-4215-a5b0-bc7572a37ead" containerName="extract-utilities"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.405437 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b148b012-e8a3-4215-a5b0-bc7572a37ead" containerName="extract-utilities"
Feb 02 23:16:32 crc kubenswrapper[4755]: E0202 23:16:32.405451 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b148b012-e8a3-4215-a5b0-bc7572a37ead" containerName="extract-content"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.405457 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b148b012-e8a3-4215-a5b0-bc7572a37ead" containerName="extract-content"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.405643 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="57c65d38-e362-4fcb-80e6-8e1881c990f1" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.405680 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="b148b012-e8a3-4215-a5b0-bc7572a37ead" containerName="registry-server"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.405692 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="85699574-9b4c-4adc-afd6-412e21a9bae3" containerName="collect-profiles"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.406603 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.420026 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"]
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.421070 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.421536 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.421954 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.422482 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-n24hl"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.422624 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.451966 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.452320 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.452460 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.452770 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kqqv\" (UniqueName: \"kubernetes.io/projected/811d66f3-04e9-4de2-8509-0c9f409addee-kube-api-access-7kqqv\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.452971 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ssh-key-openstack-edpm-ipam\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.453184 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.453352 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.554363 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.554776 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.554969 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.555193 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kqqv\" (UniqueName: \"kubernetes.io/projected/811d66f3-04e9-4de2-8509-0c9f409addee-kube-api-access-7kqqv\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.555309 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ssh-key-openstack-edpm-ipam\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.555462 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.555615 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.559116 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.559295 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ssh-key-openstack-edpm-ipam\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.559482 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.561293 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.561370 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.561475 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.573843 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kqqv\" (UniqueName: \"kubernetes.io/projected/811d66f3-04e9-4de2-8509-0c9f409addee-kube-api-access-7kqqv\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"
Feb 02 23:16:32 crc kubenswrapper[4755]: I0202 23:16:32.723847 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"
Feb 02 23:16:33 crc kubenswrapper[4755]: I0202 23:16:33.375100 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk"]
Feb 02 23:16:34 crc kubenswrapper[4755]: I0202 23:16:34.314356 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk" event={"ID":"811d66f3-04e9-4de2-8509-0c9f409addee","Type":"ContainerStarted","Data":"27d3382434f2c8064013ef07206e8fa831d211306fefaf556580d196301ed355"}
Feb 02 23:16:34 crc kubenswrapper[4755]: I0202 23:16:34.314698 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk" event={"ID":"811d66f3-04e9-4de2-8509-0c9f409addee","Type":"ContainerStarted","Data":"c0979412865373583364a820e336aef47f5c5e0a15a20ac1889f72bf5792f17c"}
Feb 02 23:16:34 crc kubenswrapper[4755]: I0202 23:16:34.340092 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk" podStartSLOduration=1.849340839 podStartE2EDuration="2.340064659s" podCreationTimestamp="2026-02-02 23:16:32 +0000 UTC" firstStartedPulling="2026-02-02 23:16:33.374563188 +0000 UTC m=+2549.065783514" lastFinishedPulling="2026-02-02 23:16:33.865286978 +0000 UTC m=+2549.556507334" observedRunningTime="2026-02-02 23:16:34.332755174 +0000 UTC m=+2550.023975500" watchObservedRunningTime="2026-02-02 23:16:34.340064659 +0000 UTC m=+2550.031285015"
Feb 02 23:16:42 crc kubenswrapper[4755]: I0202 23:16:42.071002 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089"
Feb 02 23:16:42 crc kubenswrapper[4755]: E0202 23:16:42.072908 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f"
Feb 02 23:16:55 crc kubenswrapper[4755]: I0202 23:16:55.081800 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089"
Feb 02 23:16:55 crc kubenswrapper[4755]: I0202 23:16:55.573996 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerStarted","Data":"c437a98306916ec88f266a1ace0a3fe9dc9ba521de37a1609e0180aacf69fd42"}
Feb 02 23:19:04 crc kubenswrapper[4755]: I0202 23:19:04.576018 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vtxx4"]
Feb 02 23:19:04 crc kubenswrapper[4755]: I0202 23:19:04.581521 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vtxx4"
Feb 02 23:19:04 crc kubenswrapper[4755]: I0202 23:19:04.620052 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vtxx4"]
Feb 02 23:19:04 crc kubenswrapper[4755]: I0202 23:19:04.698812 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pnpj\" (UniqueName: \"kubernetes.io/projected/470ba500-ee7c-4a6a-a36b-56d6885cb155-kube-api-access-4pnpj\") pod \"redhat-marketplace-vtxx4\" (UID: \"470ba500-ee7c-4a6a-a36b-56d6885cb155\") " pod="openshift-marketplace/redhat-marketplace-vtxx4"
Feb 02 23:19:04 crc kubenswrapper[4755]: I0202 23:19:04.698934 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/470ba500-ee7c-4a6a-a36b-56d6885cb155-catalog-content\") pod \"redhat-marketplace-vtxx4\" (UID: \"470ba500-ee7c-4a6a-a36b-56d6885cb155\") " pod="openshift-marketplace/redhat-marketplace-vtxx4"
Feb 02 23:19:04 crc kubenswrapper[4755]: I0202 23:19:04.698964 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/470ba500-ee7c-4a6a-a36b-56d6885cb155-utilities\") pod \"redhat-marketplace-vtxx4\" (UID: \"470ba500-ee7c-4a6a-a36b-56d6885cb155\") " pod="openshift-marketplace/redhat-marketplace-vtxx4"
Feb 02 23:19:04 crc kubenswrapper[4755]: I0202 23:19:04.800789 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/470ba500-ee7c-4a6a-a36b-56d6885cb155-catalog-content\") pod \"redhat-marketplace-vtxx4\" (UID: \"470ba500-ee7c-4a6a-a36b-56d6885cb155\") " pod="openshift-marketplace/redhat-marketplace-vtxx4"
Feb 02 23:19:04 crc kubenswrapper[4755]: I0202 23:19:04.800841 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/470ba500-ee7c-4a6a-a36b-56d6885cb155-utilities\") pod \"redhat-marketplace-vtxx4\" (UID: \"470ba500-ee7c-4a6a-a36b-56d6885cb155\") " pod="openshift-marketplace/redhat-marketplace-vtxx4"
Feb 02 23:19:04 crc kubenswrapper[4755]: I0202 23:19:04.801340 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/470ba500-ee7c-4a6a-a36b-56d6885cb155-utilities\") pod \"redhat-marketplace-vtxx4\" (UID: \"470ba500-ee7c-4a6a-a36b-56d6885cb155\") " pod="openshift-marketplace/redhat-marketplace-vtxx4"
Feb 02 23:19:04 crc kubenswrapper[4755]: I0202 23:19:04.801482 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pnpj\" (UniqueName: \"kubernetes.io/projected/470ba500-ee7c-4a6a-a36b-56d6885cb155-kube-api-access-4pnpj\") pod \"redhat-marketplace-vtxx4\" (UID: \"470ba500-ee7c-4a6a-a36b-56d6885cb155\") " pod="openshift-marketplace/redhat-marketplace-vtxx4"
Feb 02 23:19:04 crc kubenswrapper[4755]: I0202 23:19:04.801547 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/470ba500-ee7c-4a6a-a36b-56d6885cb155-catalog-content\") pod \"redhat-marketplace-vtxx4\" (UID: \"470ba500-ee7c-4a6a-a36b-56d6885cb155\") " pod="openshift-marketplace/redhat-marketplace-vtxx4"
Feb 02 23:19:04 crc kubenswrapper[4755]: I0202 23:19:04.829448 4755 operation_generator.go:637] "MountVolume.SetUp
succeeded for volume \"kube-api-access-4pnpj\" (UniqueName: \"kubernetes.io/projected/470ba500-ee7c-4a6a-a36b-56d6885cb155-kube-api-access-4pnpj\") pod \"redhat-marketplace-vtxx4\" (UID: \"470ba500-ee7c-4a6a-a36b-56d6885cb155\") " pod="openshift-marketplace/redhat-marketplace-vtxx4" Feb 02 23:19:04 crc kubenswrapper[4755]: I0202 23:19:04.950194 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vtxx4" Feb 02 23:19:05 crc kubenswrapper[4755]: I0202 23:19:05.478697 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vtxx4"] Feb 02 23:19:06 crc kubenswrapper[4755]: I0202 23:19:06.352531 4755 generic.go:334] "Generic (PLEG): container finished" podID="811d66f3-04e9-4de2-8509-0c9f409addee" containerID="27d3382434f2c8064013ef07206e8fa831d211306fefaf556580d196301ed355" exitCode=0 Feb 02 23:19:06 crc kubenswrapper[4755]: I0202 23:19:06.352619 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk" event={"ID":"811d66f3-04e9-4de2-8509-0c9f409addee","Type":"ContainerDied","Data":"27d3382434f2c8064013ef07206e8fa831d211306fefaf556580d196301ed355"} Feb 02 23:19:06 crc kubenswrapper[4755]: I0202 23:19:06.355274 4755 generic.go:334] "Generic (PLEG): container finished" podID="470ba500-ee7c-4a6a-a36b-56d6885cb155" containerID="499486080dcea10f1d2da1ae2a212c9ee31f3e5646e72c4504616a7aee1b4be2" exitCode=0 Feb 02 23:19:06 crc kubenswrapper[4755]: I0202 23:19:06.355327 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vtxx4" event={"ID":"470ba500-ee7c-4a6a-a36b-56d6885cb155","Type":"ContainerDied","Data":"499486080dcea10f1d2da1ae2a212c9ee31f3e5646e72c4504616a7aee1b4be2"} Feb 02 23:19:06 crc kubenswrapper[4755]: I0202 23:19:06.355360 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vtxx4" event={"ID":"470ba500-ee7c-4a6a-a36b-56d6885cb155","Type":"ContainerStarted","Data":"75eb10cfa898afd3d56c1ea2285c4168468b075dac70871b2facaa4e24af8d59"} Feb 02 23:19:07 crc kubenswrapper[4755]: I0202 23:19:07.946944 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.002967 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-inventory\") pod \"811d66f3-04e9-4de2-8509-0c9f409addee\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.003031 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ceilometer-compute-config-data-1\") pod \"811d66f3-04e9-4de2-8509-0c9f409addee\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.003062 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-telemetry-combined-ca-bundle\") pod \"811d66f3-04e9-4de2-8509-0c9f409addee\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.003113 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7kqqv\" (UniqueName: \"kubernetes.io/projected/811d66f3-04e9-4de2-8509-0c9f409addee-kube-api-access-7kqqv\") pod \"811d66f3-04e9-4de2-8509-0c9f409addee\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.003167 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ceilometer-compute-config-data-2\") pod \"811d66f3-04e9-4de2-8509-0c9f409addee\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.003195 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ssh-key-openstack-edpm-ipam\") pod \"811d66f3-04e9-4de2-8509-0c9f409addee\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.003257 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ceilometer-compute-config-data-0\") pod \"811d66f3-04e9-4de2-8509-0c9f409addee\" (UID: \"811d66f3-04e9-4de2-8509-0c9f409addee\") " Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.023963 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/811d66f3-04e9-4de2-8509-0c9f409addee-kube-api-access-7kqqv" (OuterVolumeSpecName: "kube-api-access-7kqqv") pod "811d66f3-04e9-4de2-8509-0c9f409addee" (UID: "811d66f3-04e9-4de2-8509-0c9f409addee"). InnerVolumeSpecName "kube-api-access-7kqqv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.024348 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "811d66f3-04e9-4de2-8509-0c9f409addee" (UID: "811d66f3-04e9-4de2-8509-0c9f409addee"). 
InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.031858 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "811d66f3-04e9-4de2-8509-0c9f409addee" (UID: "811d66f3-04e9-4de2-8509-0c9f409addee"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.033946 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "811d66f3-04e9-4de2-8509-0c9f409addee" (UID: "811d66f3-04e9-4de2-8509-0c9f409addee"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.041194 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-inventory" (OuterVolumeSpecName: "inventory") pod "811d66f3-04e9-4de2-8509-0c9f409addee" (UID: "811d66f3-04e9-4de2-8509-0c9f409addee"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.049073 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "811d66f3-04e9-4de2-8509-0c9f409addee" (UID: "811d66f3-04e9-4de2-8509-0c9f409addee"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.052448 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "811d66f3-04e9-4de2-8509-0c9f409addee" (UID: "811d66f3-04e9-4de2-8509-0c9f409addee"). InnerVolumeSpecName "ceilometer-compute-config-data-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.105912 4755 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-inventory\") on node \"crc\" DevicePath \"\"" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.105994 4755 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.106025 4755 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.106056 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7kqqv\" (UniqueName: \"kubernetes.io/projected/811d66f3-04e9-4de2-8509-0c9f409addee-kube-api-access-7kqqv\") on node \"crc\" DevicePath \"\"" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.106365 4755 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.106394 4755 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.106419 4755 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/811d66f3-04e9-4de2-8509-0c9f409addee-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.388499 4755 generic.go:334] "Generic (PLEG): container finished" podID="470ba500-ee7c-4a6a-a36b-56d6885cb155" containerID="d6417a6106ebf3d6f8351cddbe45b6487f4917ff24bb66feda0c42bdf11d6e7e" exitCode=0 Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.388579 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vtxx4" event={"ID":"470ba500-ee7c-4a6a-a36b-56d6885cb155","Type":"ContainerDied","Data":"d6417a6106ebf3d6f8351cddbe45b6487f4917ff24bb66feda0c42bdf11d6e7e"} Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.393156 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk" event={"ID":"811d66f3-04e9-4de2-8509-0c9f409addee","Type":"ContainerDied","Data":"c0979412865373583364a820e336aef47f5c5e0a15a20ac1889f72bf5792f17c"} Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.393200 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c0979412865373583364a820e336aef47f5c5e0a15a20ac1889f72bf5792f17c" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.393263 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.758011 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lkz8r"] Feb 02 23:19:08 crc kubenswrapper[4755]: E0202 23:19:08.758707 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="811d66f3-04e9-4de2-8509-0c9f409addee" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.758769 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="811d66f3-04e9-4de2-8509-0c9f409addee" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.759192 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="811d66f3-04e9-4de2-8509-0c9f409addee" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.762086 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lkz8r" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.790252 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lkz8r"] Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.826217 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925-catalog-content\") pod \"redhat-operators-lkz8r\" (UID: \"aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925\") " pod="openshift-marketplace/redhat-operators-lkz8r" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.826319 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2mgn\" (UniqueName: \"kubernetes.io/projected/aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925-kube-api-access-b2mgn\") pod \"redhat-operators-lkz8r\" (UID: \"aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925\") " pod="openshift-marketplace/redhat-operators-lkz8r" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.826378 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925-utilities\") pod \"redhat-operators-lkz8r\" (UID: \"aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925\") " pod="openshift-marketplace/redhat-operators-lkz8r" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.928787 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925-catalog-content\") pod \"redhat-operators-lkz8r\" (UID: \"aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925\") " pod="openshift-marketplace/redhat-operators-lkz8r" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.928863 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2mgn\" (UniqueName: \"kubernetes.io/projected/aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925-kube-api-access-b2mgn\") pod \"redhat-operators-lkz8r\" (UID: \"aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925\") " pod="openshift-marketplace/redhat-operators-lkz8r" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.928904 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925-utilities\") pod \"redhat-operators-lkz8r\" (UID: \"aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925\") " pod="openshift-marketplace/redhat-operators-lkz8r" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.929367 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925-catalog-content\") pod \"redhat-operators-lkz8r\" (UID: \"aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925\") " pod="openshift-marketplace/redhat-operators-lkz8r" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.929529 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925-utilities\") pod \"redhat-operators-lkz8r\" (UID: \"aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925\") " pod="openshift-marketplace/redhat-operators-lkz8r" Feb 02 23:19:08 crc kubenswrapper[4755]: I0202 23:19:08.959846 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2mgn\" (UniqueName: \"kubernetes.io/projected/aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925-kube-api-access-b2mgn\") pod \"redhat-operators-lkz8r\" (UID: \"aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925\") " pod="openshift-marketplace/redhat-operators-lkz8r" Feb 02 23:19:09 crc kubenswrapper[4755]: I0202 23:19:09.115453 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lkz8r" Feb 02 23:19:09 crc kubenswrapper[4755]: I0202 23:19:09.409819 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vtxx4" event={"ID":"470ba500-ee7c-4a6a-a36b-56d6885cb155","Type":"ContainerStarted","Data":"da7ef66aed219c1eebc1722d8a75dc5760498572d83129c34acfe7c6899d1694"} Feb 02 23:19:09 crc kubenswrapper[4755]: I0202 23:19:09.434239 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vtxx4" podStartSLOduration=2.9769233440000002 podStartE2EDuration="5.434221918s" podCreationTimestamp="2026-02-02 23:19:04 +0000 UTC" firstStartedPulling="2026-02-02 23:19:06.358850005 +0000 UTC m=+2702.050070341" lastFinishedPulling="2026-02-02 23:19:08.816148549 +0000 UTC m=+2704.507368915" observedRunningTime="2026-02-02 23:19:09.43285718 +0000 UTC m=+2705.124077526" watchObservedRunningTime="2026-02-02 23:19:09.434221918 +0000 UTC m=+2705.125442244" Feb 02 23:19:09 crc kubenswrapper[4755]: I0202 23:19:09.627202 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lkz8r"] Feb 02 23:19:10 crc kubenswrapper[4755]: I0202 23:19:10.430749 4755 generic.go:334] "Generic (PLEG): container finished" podID="aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925" containerID="b1efe0989260265c45887749fe28477635c99c8dbf491e5e758338521104c2f6" exitCode=0 Feb 02 23:19:10 crc kubenswrapper[4755]: I0202 23:19:10.433407 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lkz8r" event={"ID":"aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925","Type":"ContainerDied","Data":"b1efe0989260265c45887749fe28477635c99c8dbf491e5e758338521104c2f6"} Feb 02 23:19:10 crc kubenswrapper[4755]: I0202 23:19:10.433469 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lkz8r" 
event={"ID":"aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925","Type":"ContainerStarted","Data":"cc727fcf39775f0f0693b01867bb5abd175ec44775f106fce07c603aff381beb"} Feb 02 23:19:12 crc kubenswrapper[4755]: I0202 23:19:12.456063 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lkz8r" event={"ID":"aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925","Type":"ContainerStarted","Data":"2a8eed6cfe206a45ca347d84929c29d134ea532bc92eaf3fe6611b57694d84d6"} Feb 02 23:19:14 crc kubenswrapper[4755]: I0202 23:19:14.480904 4755 generic.go:334] "Generic (PLEG): container finished" podID="aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925" containerID="2a8eed6cfe206a45ca347d84929c29d134ea532bc92eaf3fe6611b57694d84d6" exitCode=0 Feb 02 23:19:14 crc kubenswrapper[4755]: I0202 23:19:14.480971 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lkz8r" event={"ID":"aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925","Type":"ContainerDied","Data":"2a8eed6cfe206a45ca347d84929c29d134ea532bc92eaf3fe6611b57694d84d6"} Feb 02 23:19:14 crc kubenswrapper[4755]: I0202 23:19:14.484074 4755 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 23:19:14 crc kubenswrapper[4755]: I0202 23:19:14.950682 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vtxx4" Feb 02 23:19:14 crc kubenswrapper[4755]: I0202 23:19:14.952207 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vtxx4" Feb 02 23:19:14 crc kubenswrapper[4755]: I0202 23:19:14.998936 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vtxx4" Feb 02 23:19:15 crc kubenswrapper[4755]: I0202 23:19:15.506104 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lkz8r" event={"ID":"aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925","Type":"ContainerStarted","Data":"449a1faac5fc412dc3d0a350307922f5c4e6959baf0d9d89fe9e2457b5f1cd3a"} Feb 02 23:19:15 crc kubenswrapper[4755]: I0202 23:19:15.523556 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lkz8r" podStartSLOduration=3.078730831 podStartE2EDuration="7.523541837s" podCreationTimestamp="2026-02-02 23:19:08 +0000 UTC" firstStartedPulling="2026-02-02 23:19:10.437987206 +0000 UTC m=+2706.129207532" lastFinishedPulling="2026-02-02 23:19:14.882798212 +0000 UTC m=+2710.574018538" observedRunningTime="2026-02-02 23:19:15.521818219 +0000 UTC m=+2711.213038555" watchObservedRunningTime="2026-02-02 23:19:15.523541837 +0000 UTC m=+2711.214762163" Feb 02 23:19:15 crc kubenswrapper[4755]: I0202 23:19:15.557216 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vtxx4" Feb 02 23:19:16 crc kubenswrapper[4755]: I0202 23:19:16.536146 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vtxx4"] Feb 02 23:19:18 crc kubenswrapper[4755]: I0202 23:19:18.536933 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vtxx4" podUID="470ba500-ee7c-4a6a-a36b-56d6885cb155" containerName="registry-server" containerID="cri-o://da7ef66aed219c1eebc1722d8a75dc5760498572d83129c34acfe7c6899d1694" gracePeriod=2 Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.116648 4755 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lkz8r" Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.117367 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lkz8r" Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.146870 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vtxx4" Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.192624 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/470ba500-ee7c-4a6a-a36b-56d6885cb155-utilities\") pod \"470ba500-ee7c-4a6a-a36b-56d6885cb155\" (UID: \"470ba500-ee7c-4a6a-a36b-56d6885cb155\") " Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.192803 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pnpj\" (UniqueName: \"kubernetes.io/projected/470ba500-ee7c-4a6a-a36b-56d6885cb155-kube-api-access-4pnpj\") pod \"470ba500-ee7c-4a6a-a36b-56d6885cb155\" (UID: \"470ba500-ee7c-4a6a-a36b-56d6885cb155\") " Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.192857 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/470ba500-ee7c-4a6a-a36b-56d6885cb155-catalog-content\") pod \"470ba500-ee7c-4a6a-a36b-56d6885cb155\" (UID: \"470ba500-ee7c-4a6a-a36b-56d6885cb155\") " Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.194701 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/470ba500-ee7c-4a6a-a36b-56d6885cb155-utilities" (OuterVolumeSpecName: "utilities") pod "470ba500-ee7c-4a6a-a36b-56d6885cb155" (UID: "470ba500-ee7c-4a6a-a36b-56d6885cb155"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.199785 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/470ba500-ee7c-4a6a-a36b-56d6885cb155-kube-api-access-4pnpj" (OuterVolumeSpecName: "kube-api-access-4pnpj") pod "470ba500-ee7c-4a6a-a36b-56d6885cb155" (UID: "470ba500-ee7c-4a6a-a36b-56d6885cb155"). InnerVolumeSpecName "kube-api-access-4pnpj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.217389 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/470ba500-ee7c-4a6a-a36b-56d6885cb155-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "470ba500-ee7c-4a6a-a36b-56d6885cb155" (UID: "470ba500-ee7c-4a6a-a36b-56d6885cb155"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.295215 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/470ba500-ee7c-4a6a-a36b-56d6885cb155-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.295244 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/470ba500-ee7c-4a6a-a36b-56d6885cb155-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.295255 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pnpj\" (UniqueName: \"kubernetes.io/projected/470ba500-ee7c-4a6a-a36b-56d6885cb155-kube-api-access-4pnpj\") on node \"crc\" DevicePath \"\"" Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.549570 4755 generic.go:334] "Generic (PLEG): container finished" podID="470ba500-ee7c-4a6a-a36b-56d6885cb155" containerID="da7ef66aed219c1eebc1722d8a75dc5760498572d83129c34acfe7c6899d1694" exitCode=0 Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.549620 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vtxx4" event={"ID":"470ba500-ee7c-4a6a-a36b-56d6885cb155","Type":"ContainerDied","Data":"da7ef66aed219c1eebc1722d8a75dc5760498572d83129c34acfe7c6899d1694"} Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.549650 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vtxx4" event={"ID":"470ba500-ee7c-4a6a-a36b-56d6885cb155","Type":"ContainerDied","Data":"75eb10cfa898afd3d56c1ea2285c4168468b075dac70871b2facaa4e24af8d59"} Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.549674 4755 scope.go:117] "RemoveContainer" containerID="da7ef66aed219c1eebc1722d8a75dc5760498572d83129c34acfe7c6899d1694" Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.549625 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vtxx4" Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.581080 4755 scope.go:117] "RemoveContainer" containerID="d6417a6106ebf3d6f8351cddbe45b6487f4917ff24bb66feda0c42bdf11d6e7e" Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.598317 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vtxx4"] Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.608594 4755 scope.go:117] "RemoveContainer" containerID="499486080dcea10f1d2da1ae2a212c9ee31f3e5646e72c4504616a7aee1b4be2" Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.612872 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vtxx4"] Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.680187 4755 scope.go:117] "RemoveContainer" containerID="da7ef66aed219c1eebc1722d8a75dc5760498572d83129c34acfe7c6899d1694" Feb 02 23:19:19 crc kubenswrapper[4755]: E0202 23:19:19.680714 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da7ef66aed219c1eebc1722d8a75dc5760498572d83129c34acfe7c6899d1694\": container with ID starting with da7ef66aed219c1eebc1722d8a75dc5760498572d83129c34acfe7c6899d1694 not found: ID does not exist" containerID="da7ef66aed219c1eebc1722d8a75dc5760498572d83129c34acfe7c6899d1694" Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.680779 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da7ef66aed219c1eebc1722d8a75dc5760498572d83129c34acfe7c6899d1694"} err="failed to get container status \"da7ef66aed219c1eebc1722d8a75dc5760498572d83129c34acfe7c6899d1694\": rpc error: code = NotFound desc = could not find container \"da7ef66aed219c1eebc1722d8a75dc5760498572d83129c34acfe7c6899d1694\": container with ID starting with da7ef66aed219c1eebc1722d8a75dc5760498572d83129c34acfe7c6899d1694 not found: ID does not exist" Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.680801 4755 scope.go:117] "RemoveContainer" containerID="d6417a6106ebf3d6f8351cddbe45b6487f4917ff24bb66feda0c42bdf11d6e7e" Feb 02 23:19:19 crc kubenswrapper[4755]: E0202 23:19:19.681207 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6417a6106ebf3d6f8351cddbe45b6487f4917ff24bb66feda0c42bdf11d6e7e\": container with ID starting with d6417a6106ebf3d6f8351cddbe45b6487f4917ff24bb66feda0c42bdf11d6e7e not found: ID does not exist" containerID="d6417a6106ebf3d6f8351cddbe45b6487f4917ff24bb66feda0c42bdf11d6e7e" Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.681231 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6417a6106ebf3d6f8351cddbe45b6487f4917ff24bb66feda0c42bdf11d6e7e"} err="failed to get container status \"d6417a6106ebf3d6f8351cddbe45b6487f4917ff24bb66feda0c42bdf11d6e7e\": rpc error: code = NotFound desc = could not find container \"d6417a6106ebf3d6f8351cddbe45b6487f4917ff24bb66feda0c42bdf11d6e7e\": container with ID starting with d6417a6106ebf3d6f8351cddbe45b6487f4917ff24bb66feda0c42bdf11d6e7e not found: ID does not exist" Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.681243 4755 scope.go:117] "RemoveContainer" containerID="499486080dcea10f1d2da1ae2a212c9ee31f3e5646e72c4504616a7aee1b4be2" Feb 02 23:19:19 crc kubenswrapper[4755]: E0202 23:19:19.681618 4755 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"499486080dcea10f1d2da1ae2a212c9ee31f3e5646e72c4504616a7aee1b4be2\": container with ID starting with 499486080dcea10f1d2da1ae2a212c9ee31f3e5646e72c4504616a7aee1b4be2 not found: ID does not exist" containerID="499486080dcea10f1d2da1ae2a212c9ee31f3e5646e72c4504616a7aee1b4be2" Feb 02 23:19:19 crc kubenswrapper[4755]: I0202 23:19:19.681642 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"499486080dcea10f1d2da1ae2a212c9ee31f3e5646e72c4504616a7aee1b4be2"} err="failed to get container status \"499486080dcea10f1d2da1ae2a212c9ee31f3e5646e72c4504616a7aee1b4be2\": rpc error: code = NotFound desc = could not find container \"499486080dcea10f1d2da1ae2a212c9ee31f3e5646e72c4504616a7aee1b4be2\": container with ID starting with 499486080dcea10f1d2da1ae2a212c9ee31f3e5646e72c4504616a7aee1b4be2 not found: ID does not exist" Feb 02 23:19:20 crc kubenswrapper[4755]: I0202 23:19:20.158722 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-lkz8r" podUID="aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925" containerName="registry-server" probeResult="failure" output=< Feb 02 23:19:20 crc kubenswrapper[4755]: timeout: failed to connect service ":50051" within 1s Feb 02 23:19:20 crc kubenswrapper[4755]: > Feb 02 23:19:21 crc kubenswrapper[4755]: I0202 23:19:21.092350 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="470ba500-ee7c-4a6a-a36b-56d6885cb155" path="/var/lib/kubelet/pods/470ba500-ee7c-4a6a-a36b-56d6885cb155/volumes" Feb 02 23:19:23 crc kubenswrapper[4755]: I0202 23:19:23.389438 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 23:19:23 crc kubenswrapper[4755]: I0202 23:19:23.389916 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 23:19:29 crc kubenswrapper[4755]: I0202 23:19:29.180506 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lkz8r" Feb 02 23:19:29 crc kubenswrapper[4755]: I0202 23:19:29.241700 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lkz8r" Feb 02 23:19:29 crc kubenswrapper[4755]: I0202 23:19:29.421555 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lkz8r"] Feb 02 23:19:30 crc kubenswrapper[4755]: I0202 23:19:30.667918 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lkz8r" podUID="aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925" containerName="registry-server" containerID="cri-o://449a1faac5fc412dc3d0a350307922f5c4e6959baf0d9d89fe9e2457b5f1cd3a" gracePeriod=2 Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.313670 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lkz8r" Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.466285 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925-catalog-content\") pod \"aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925\" (UID: \"aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925\") " Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.466473 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925-utilities\") pod \"aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925\" (UID: \"aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925\") " Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.466581 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2mgn\" (UniqueName: \"kubernetes.io/projected/aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925-kube-api-access-b2mgn\") pod \"aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925\" (UID: \"aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925\") " Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.467507 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925-utilities" (OuterVolumeSpecName: "utilities") pod "aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925" (UID: "aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.484922 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925-kube-api-access-b2mgn" (OuterVolumeSpecName: "kube-api-access-b2mgn") pod "aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925" (UID: "aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925"). InnerVolumeSpecName "kube-api-access-b2mgn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.569256 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.569288 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2mgn\" (UniqueName: \"kubernetes.io/projected/aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925-kube-api-access-b2mgn\") on node \"crc\" DevicePath \"\"" Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.642063 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925" (UID: "aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.670705 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.679188 4755 generic.go:334] "Generic (PLEG): container finished" podID="aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925" containerID="449a1faac5fc412dc3d0a350307922f5c4e6959baf0d9d89fe9e2457b5f1cd3a" exitCode=0 Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.679241 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lkz8r" event={"ID":"aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925","Type":"ContainerDied","Data":"449a1faac5fc412dc3d0a350307922f5c4e6959baf0d9d89fe9e2457b5f1cd3a"} Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.679269 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lkz8r" event={"ID":"aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925","Type":"ContainerDied","Data":"cc727fcf39775f0f0693b01867bb5abd175ec44775f106fce07c603aff381beb"} Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.679271 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lkz8r" Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.679286 4755 scope.go:117] "RemoveContainer" containerID="449a1faac5fc412dc3d0a350307922f5c4e6959baf0d9d89fe9e2457b5f1cd3a" Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.708680 4755 scope.go:117] "RemoveContainer" containerID="2a8eed6cfe206a45ca347d84929c29d134ea532bc92eaf3fe6611b57694d84d6" Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.727500 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lkz8r"] Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.740382 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lkz8r"] Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.755039 4755 scope.go:117] "RemoveContainer" containerID="b1efe0989260265c45887749fe28477635c99c8dbf491e5e758338521104c2f6" Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.784507 4755 scope.go:117] "RemoveContainer" containerID="449a1faac5fc412dc3d0a350307922f5c4e6959baf0d9d89fe9e2457b5f1cd3a" Feb 02 23:19:31 crc kubenswrapper[4755]: E0202 23:19:31.785028 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"449a1faac5fc412dc3d0a350307922f5c4e6959baf0d9d89fe9e2457b5f1cd3a\": container with ID starting with 449a1faac5fc412dc3d0a350307922f5c4e6959baf0d9d89fe9e2457b5f1cd3a not found: ID does not exist" containerID="449a1faac5fc412dc3d0a350307922f5c4e6959baf0d9d89fe9e2457b5f1cd3a" Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.785065 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"449a1faac5fc412dc3d0a350307922f5c4e6959baf0d9d89fe9e2457b5f1cd3a"} err="failed to get container status \"449a1faac5fc412dc3d0a350307922f5c4e6959baf0d9d89fe9e2457b5f1cd3a\": rpc error: code = NotFound desc = could not find container \"449a1faac5fc412dc3d0a350307922f5c4e6959baf0d9d89fe9e2457b5f1cd3a\": container with ID starting with 449a1faac5fc412dc3d0a350307922f5c4e6959baf0d9d89fe9e2457b5f1cd3a not found: ID does not exist" Feb 02 23:19:31 crc 
kubenswrapper[4755]: I0202 23:19:31.785085 4755 scope.go:117] "RemoveContainer" containerID="2a8eed6cfe206a45ca347d84929c29d134ea532bc92eaf3fe6611b57694d84d6" Feb 02 23:19:31 crc kubenswrapper[4755]: E0202 23:19:31.785406 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a8eed6cfe206a45ca347d84929c29d134ea532bc92eaf3fe6611b57694d84d6\": container with ID starting with 2a8eed6cfe206a45ca347d84929c29d134ea532bc92eaf3fe6611b57694d84d6 not found: ID does not exist" containerID="2a8eed6cfe206a45ca347d84929c29d134ea532bc92eaf3fe6611b57694d84d6" Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.785426 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a8eed6cfe206a45ca347d84929c29d134ea532bc92eaf3fe6611b57694d84d6"} err="failed to get container status \"2a8eed6cfe206a45ca347d84929c29d134ea532bc92eaf3fe6611b57694d84d6\": rpc error: code = NotFound desc = could not find container \"2a8eed6cfe206a45ca347d84929c29d134ea532bc92eaf3fe6611b57694d84d6\": container with ID starting with 2a8eed6cfe206a45ca347d84929c29d134ea532bc92eaf3fe6611b57694d84d6 not found: ID does not exist" Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.785439 4755 scope.go:117] "RemoveContainer" containerID="b1efe0989260265c45887749fe28477635c99c8dbf491e5e758338521104c2f6" Feb 02 23:19:31 crc kubenswrapper[4755]: E0202 23:19:31.785621 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1efe0989260265c45887749fe28477635c99c8dbf491e5e758338521104c2f6\": container with ID starting with b1efe0989260265c45887749fe28477635c99c8dbf491e5e758338521104c2f6 not found: ID does not exist" containerID="b1efe0989260265c45887749fe28477635c99c8dbf491e5e758338521104c2f6" Feb 02 23:19:31 crc kubenswrapper[4755]: I0202 23:19:31.785635 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1efe0989260265c45887749fe28477635c99c8dbf491e5e758338521104c2f6"} err="failed to get container status \"b1efe0989260265c45887749fe28477635c99c8dbf491e5e758338521104c2f6\": rpc error: code = NotFound desc = could not find container \"b1efe0989260265c45887749fe28477635c99c8dbf491e5e758338521104c2f6\": container with ID starting with b1efe0989260265c45887749fe28477635c99c8dbf491e5e758338521104c2f6 not found: ID does not exist" Feb 02 23:19:33 crc kubenswrapper[4755]: I0202 23:19:33.091824 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925" path="/var/lib/kubelet/pods/aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925/volumes" Feb 02 23:19:53 crc kubenswrapper[4755]: I0202 23:19:53.389180 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 23:19:53 crc kubenswrapper[4755]: I0202 23:19:53.389922 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 23:20:23 crc kubenswrapper[4755]: I0202 23:20:23.389333 4755 patch_prober.go:28] interesting 
pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 23:20:23 crc kubenswrapper[4755]: I0202 23:20:23.390242 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 23:20:23 crc kubenswrapper[4755]: I0202 23:20:23.390322 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" Feb 02 23:20:23 crc kubenswrapper[4755]: I0202 23:20:23.391705 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c437a98306916ec88f266a1ace0a3fe9dc9ba521de37a1609e0180aacf69fd42"} pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 23:20:23 crc kubenswrapper[4755]: I0202 23:20:23.391852 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" containerID="cri-o://c437a98306916ec88f266a1ace0a3fe9dc9ba521de37a1609e0180aacf69fd42" gracePeriod=600 Feb 02 23:20:24 crc kubenswrapper[4755]: I0202 23:20:24.405473 4755 generic.go:334] "Generic (PLEG): container finished" podID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerID="c437a98306916ec88f266a1ace0a3fe9dc9ba521de37a1609e0180aacf69fd42" exitCode=0 Feb 02 23:20:24 crc kubenswrapper[4755]: I0202 23:20:24.405566 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerDied","Data":"c437a98306916ec88f266a1ace0a3fe9dc9ba521de37a1609e0180aacf69fd42"} Feb 02 23:20:24 crc kubenswrapper[4755]: I0202 23:20:24.406093 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerStarted","Data":"034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0"} Feb 02 23:20:24 crc kubenswrapper[4755]: I0202 23:20:24.406123 4755 scope.go:117] "RemoveContainer" containerID="79e61a70a217ee3ac3fcd1db00cb155aa71f4baa0d30aa805cb970792bac3089" Feb 02 23:20:46 crc kubenswrapper[4755]: I0202 23:20:46.128451 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-vzkhl/must-gather-wxxgt"] Feb 02 23:20:46 crc kubenswrapper[4755]: E0202 23:20:46.129368 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="470ba500-ee7c-4a6a-a36b-56d6885cb155" containerName="extract-utilities" Feb 02 23:20:46 crc kubenswrapper[4755]: I0202 23:20:46.129384 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="470ba500-ee7c-4a6a-a36b-56d6885cb155" containerName="extract-utilities" Feb 02 23:20:46 crc kubenswrapper[4755]: E0202 23:20:46.129405 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925" 
containerName="extract-content" Feb 02 23:20:46 crc kubenswrapper[4755]: I0202 23:20:46.129411 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925" containerName="extract-content" Feb 02 23:20:46 crc kubenswrapper[4755]: E0202 23:20:46.129420 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="470ba500-ee7c-4a6a-a36b-56d6885cb155" containerName="extract-content" Feb 02 23:20:46 crc kubenswrapper[4755]: I0202 23:20:46.129427 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="470ba500-ee7c-4a6a-a36b-56d6885cb155" containerName="extract-content" Feb 02 23:20:46 crc kubenswrapper[4755]: E0202 23:20:46.129441 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="470ba500-ee7c-4a6a-a36b-56d6885cb155" containerName="registry-server" Feb 02 23:20:46 crc kubenswrapper[4755]: I0202 23:20:46.129447 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="470ba500-ee7c-4a6a-a36b-56d6885cb155" containerName="registry-server" Feb 02 23:20:46 crc kubenswrapper[4755]: E0202 23:20:46.129468 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925" containerName="extract-utilities" Feb 02 23:20:46 crc kubenswrapper[4755]: I0202 23:20:46.129473 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925" containerName="extract-utilities" Feb 02 23:20:46 crc kubenswrapper[4755]: E0202 23:20:46.129483 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925" containerName="registry-server" Feb 02 23:20:46 crc kubenswrapper[4755]: I0202 23:20:46.129489 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925" containerName="registry-server" Feb 02 23:20:46 crc kubenswrapper[4755]: I0202 23:20:46.129689 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="470ba500-ee7c-4a6a-a36b-56d6885cb155" containerName="registry-server" Feb 02 23:20:46 crc kubenswrapper[4755]: I0202 23:20:46.129700 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="aec7ea0e-c0b6-4aed-ad7f-cc0d12f6c925" containerName="registry-server" Feb 02 23:20:46 crc kubenswrapper[4755]: I0202 23:20:46.130841 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vzkhl/must-gather-wxxgt" Feb 02 23:20:46 crc kubenswrapper[4755]: I0202 23:20:46.133209 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-vzkhl"/"kube-root-ca.crt" Feb 02 23:20:46 crc kubenswrapper[4755]: I0202 23:20:46.134879 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-vzkhl"/"openshift-service-ca.crt" Feb 02 23:20:46 crc kubenswrapper[4755]: I0202 23:20:46.160711 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-vzkhl/must-gather-wxxgt"] Feb 02 23:20:46 crc kubenswrapper[4755]: I0202 23:20:46.310060 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnlz5\" (UniqueName: \"kubernetes.io/projected/9759deb7-577e-46c5-b707-23b2025eec70-kube-api-access-jnlz5\") pod \"must-gather-wxxgt\" (UID: \"9759deb7-577e-46c5-b707-23b2025eec70\") " pod="openshift-must-gather-vzkhl/must-gather-wxxgt" Feb 02 23:20:46 crc kubenswrapper[4755]: I0202 23:20:46.310102 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/9759deb7-577e-46c5-b707-23b2025eec70-must-gather-output\") pod \"must-gather-wxxgt\" (UID: \"9759deb7-577e-46c5-b707-23b2025eec70\") " pod="openshift-must-gather-vzkhl/must-gather-wxxgt" Feb 02 23:20:46 crc kubenswrapper[4755]: I0202 23:20:46.411490 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnlz5\" (UniqueName: \"kubernetes.io/projected/9759deb7-577e-46c5-b707-23b2025eec70-kube-api-access-jnlz5\") pod \"must-gather-wxxgt\" (UID: \"9759deb7-577e-46c5-b707-23b2025eec70\") " pod="openshift-must-gather-vzkhl/must-gather-wxxgt" Feb 02 23:20:46 crc kubenswrapper[4755]: I0202 23:20:46.411537 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/9759deb7-577e-46c5-b707-23b2025eec70-must-gather-output\") pod \"must-gather-wxxgt\" (UID: \"9759deb7-577e-46c5-b707-23b2025eec70\") " pod="openshift-must-gather-vzkhl/must-gather-wxxgt" Feb 02 23:20:46 crc kubenswrapper[4755]: I0202 23:20:46.412044 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/9759deb7-577e-46c5-b707-23b2025eec70-must-gather-output\") pod \"must-gather-wxxgt\" (UID: \"9759deb7-577e-46c5-b707-23b2025eec70\") " pod="openshift-must-gather-vzkhl/must-gather-wxxgt" Feb 02 23:20:46 crc kubenswrapper[4755]: I0202 23:20:46.434400 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnlz5\" (UniqueName: \"kubernetes.io/projected/9759deb7-577e-46c5-b707-23b2025eec70-kube-api-access-jnlz5\") pod \"must-gather-wxxgt\" (UID: \"9759deb7-577e-46c5-b707-23b2025eec70\") " pod="openshift-must-gather-vzkhl/must-gather-wxxgt" Feb 02 23:20:46 crc kubenswrapper[4755]: I0202 23:20:46.461900 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vzkhl/must-gather-wxxgt" Feb 02 23:20:47 crc kubenswrapper[4755]: I0202 23:20:47.090914 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-vzkhl/must-gather-wxxgt"] Feb 02 23:20:47 crc kubenswrapper[4755]: I0202 23:20:47.748113 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vzkhl/must-gather-wxxgt" event={"ID":"9759deb7-577e-46c5-b707-23b2025eec70","Type":"ContainerStarted","Data":"ed1e3b8167fc22fa1e6c4ad30e039e18ff29df252f035ea31ed62c246820ab38"} Feb 02 23:20:51 crc kubenswrapper[4755]: I0202 23:20:51.788828 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vzkhl/must-gather-wxxgt" event={"ID":"9759deb7-577e-46c5-b707-23b2025eec70","Type":"ContainerStarted","Data":"642ff9e100ae5667078161c140b860a57093253257053d46a8a0213fc55291fc"} Feb 02 23:20:52 crc kubenswrapper[4755]: I0202 23:20:52.802000 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vzkhl/must-gather-wxxgt" event={"ID":"9759deb7-577e-46c5-b707-23b2025eec70","Type":"ContainerStarted","Data":"6e802f55cf295d6735244d4ab0459f2d11e3bd8f659ef37453f7532ad576d752"} Feb 02 23:20:52 crc kubenswrapper[4755]: I0202 23:20:52.825528 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-vzkhl/must-gather-wxxgt" podStartSLOduration=2.596797128 podStartE2EDuration="6.825504842s" podCreationTimestamp="2026-02-02 23:20:46 +0000 UTC" firstStartedPulling="2026-02-02 23:20:47.090389696 +0000 UTC m=+2802.781610012" lastFinishedPulling="2026-02-02 23:20:51.3190974 +0000 UTC m=+2807.010317726" observedRunningTime="2026-02-02 23:20:52.820539102 +0000 UTC m=+2808.511759428" watchObservedRunningTime="2026-02-02 23:20:52.825504842 +0000 UTC m=+2808.516725208" Feb 02 23:20:54 crc kubenswrapper[4755]: E0202 23:20:54.841279 4755 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.36:50630->38.102.83.36:42873: write tcp 38.102.83.36:50630->38.102.83.36:42873: write: broken pipe Feb 02 23:20:55 crc kubenswrapper[4755]: I0202 23:20:55.284850 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-vzkhl/crc-debug-sp8rt"] Feb 02 23:20:55 crc kubenswrapper[4755]: I0202 23:20:55.286699 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vzkhl/crc-debug-sp8rt" Feb 02 23:20:55 crc kubenswrapper[4755]: I0202 23:20:55.292420 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-vzkhl"/"default-dockercfg-cwq8h" Feb 02 23:20:55 crc kubenswrapper[4755]: I0202 23:20:55.405949 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8pxn\" (UniqueName: \"kubernetes.io/projected/ceeffcfc-e594-47f4-8bc9-b97f2464eed4-kube-api-access-t8pxn\") pod \"crc-debug-sp8rt\" (UID: \"ceeffcfc-e594-47f4-8bc9-b97f2464eed4\") " pod="openshift-must-gather-vzkhl/crc-debug-sp8rt" Feb 02 23:20:55 crc kubenswrapper[4755]: I0202 23:20:55.406126 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ceeffcfc-e594-47f4-8bc9-b97f2464eed4-host\") pod \"crc-debug-sp8rt\" (UID: \"ceeffcfc-e594-47f4-8bc9-b97f2464eed4\") " pod="openshift-must-gather-vzkhl/crc-debug-sp8rt" Feb 02 23:20:55 crc kubenswrapper[4755]: I0202 23:20:55.508358 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ceeffcfc-e594-47f4-8bc9-b97f2464eed4-host\") pod \"crc-debug-sp8rt\" (UID: \"ceeffcfc-e594-47f4-8bc9-b97f2464eed4\") " pod="openshift-must-gather-vzkhl/crc-debug-sp8rt" Feb 02 23:20:55 crc kubenswrapper[4755]: I0202 23:20:55.508521 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8pxn\" (UniqueName: \"kubernetes.io/projected/ceeffcfc-e594-47f4-8bc9-b97f2464eed4-kube-api-access-t8pxn\") pod \"crc-debug-sp8rt\" (UID: \"ceeffcfc-e594-47f4-8bc9-b97f2464eed4\") " pod="openshift-must-gather-vzkhl/crc-debug-sp8rt" Feb 02 23:20:55 crc kubenswrapper[4755]: I0202 23:20:55.508507 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ceeffcfc-e594-47f4-8bc9-b97f2464eed4-host\") pod \"crc-debug-sp8rt\" (UID: \"ceeffcfc-e594-47f4-8bc9-b97f2464eed4\") " pod="openshift-must-gather-vzkhl/crc-debug-sp8rt" Feb 02 23:20:55 crc kubenswrapper[4755]: I0202 23:20:55.530870 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8pxn\" (UniqueName: \"kubernetes.io/projected/ceeffcfc-e594-47f4-8bc9-b97f2464eed4-kube-api-access-t8pxn\") pod \"crc-debug-sp8rt\" (UID: \"ceeffcfc-e594-47f4-8bc9-b97f2464eed4\") " pod="openshift-must-gather-vzkhl/crc-debug-sp8rt" Feb 02 23:20:55 crc kubenswrapper[4755]: I0202 23:20:55.603932 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vzkhl/crc-debug-sp8rt" Feb 02 23:20:55 crc kubenswrapper[4755]: I0202 23:20:55.837641 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vzkhl/crc-debug-sp8rt" event={"ID":"ceeffcfc-e594-47f4-8bc9-b97f2464eed4","Type":"ContainerStarted","Data":"0558b19fe6fa486077c2c05ba44f0bf638a16f7068e34408aaabf3793af013c9"} Feb 02 23:20:56 crc kubenswrapper[4755]: E0202 23:20:56.952958 4755 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.36:50612->38.102.83.36:42873: read tcp 38.102.83.36:50612->38.102.83.36:42873: read: connection reset by peer Feb 02 23:21:07 crc kubenswrapper[4755]: I0202 23:21:07.949397 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vzkhl/crc-debug-sp8rt" event={"ID":"ceeffcfc-e594-47f4-8bc9-b97f2464eed4","Type":"ContainerStarted","Data":"4a0f1945919975ff6e40d0f951a3543b91105ddd750aac2fe397708fae15d826"} Feb 02 23:21:17 crc kubenswrapper[4755]: I0202 23:21:17.816322 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-vzkhl/crc-debug-sp8rt" podStartSLOduration=11.202657532 podStartE2EDuration="22.816305265s" podCreationTimestamp="2026-02-02 23:20:55 +0000 UTC" firstStartedPulling="2026-02-02 23:20:55.641385012 +0000 UTC m=+2811.332605338" lastFinishedPulling="2026-02-02 23:21:07.255032745 +0000 UTC m=+2822.946253071" observedRunningTime="2026-02-02 23:21:07.971236122 +0000 UTC m=+2823.662456448" watchObservedRunningTime="2026-02-02 23:21:17.816305265 +0000 UTC m=+2833.507525591" Feb 02 23:21:17 crc kubenswrapper[4755]: I0202 23:21:17.818142 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-slk5m"] Feb 02 23:21:17 crc kubenswrapper[4755]: I0202 23:21:17.820220 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-slk5m" Feb 02 23:21:17 crc kubenswrapper[4755]: I0202 23:21:17.837513 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/596cd905-0e7d-41ba-a535-ffa0f90820ec-catalog-content\") pod \"certified-operators-slk5m\" (UID: \"596cd905-0e7d-41ba-a535-ffa0f90820ec\") " pod="openshift-marketplace/certified-operators-slk5m" Feb 02 23:21:17 crc kubenswrapper[4755]: I0202 23:21:17.837618 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/596cd905-0e7d-41ba-a535-ffa0f90820ec-utilities\") pod \"certified-operators-slk5m\" (UID: \"596cd905-0e7d-41ba-a535-ffa0f90820ec\") " pod="openshift-marketplace/certified-operators-slk5m" Feb 02 23:21:17 crc kubenswrapper[4755]: I0202 23:21:17.837707 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gv64f\" (UniqueName: \"kubernetes.io/projected/596cd905-0e7d-41ba-a535-ffa0f90820ec-kube-api-access-gv64f\") pod \"certified-operators-slk5m\" (UID: \"596cd905-0e7d-41ba-a535-ffa0f90820ec\") " pod="openshift-marketplace/certified-operators-slk5m" Feb 02 23:21:17 crc kubenswrapper[4755]: I0202 23:21:17.840668 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-slk5m"] Feb 02 23:21:17 crc kubenswrapper[4755]: I0202 23:21:17.939947 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gv64f\" (UniqueName: \"kubernetes.io/projected/596cd905-0e7d-41ba-a535-ffa0f90820ec-kube-api-access-gv64f\") pod \"certified-operators-slk5m\" (UID: \"596cd905-0e7d-41ba-a535-ffa0f90820ec\") " pod="openshift-marketplace/certified-operators-slk5m" Feb 02 23:21:17 crc kubenswrapper[4755]: I0202 23:21:17.940075 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/596cd905-0e7d-41ba-a535-ffa0f90820ec-catalog-content\") pod \"certified-operators-slk5m\" (UID: \"596cd905-0e7d-41ba-a535-ffa0f90820ec\") " pod="openshift-marketplace/certified-operators-slk5m" Feb 02 23:21:17 crc kubenswrapper[4755]: I0202 23:21:17.940134 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/596cd905-0e7d-41ba-a535-ffa0f90820ec-utilities\") pod \"certified-operators-slk5m\" (UID: \"596cd905-0e7d-41ba-a535-ffa0f90820ec\") " pod="openshift-marketplace/certified-operators-slk5m" Feb 02 23:21:17 crc kubenswrapper[4755]: I0202 23:21:17.940521 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/596cd905-0e7d-41ba-a535-ffa0f90820ec-catalog-content\") pod \"certified-operators-slk5m\" (UID: \"596cd905-0e7d-41ba-a535-ffa0f90820ec\") " pod="openshift-marketplace/certified-operators-slk5m" Feb 02 23:21:17 crc kubenswrapper[4755]: I0202 23:21:17.940540 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/596cd905-0e7d-41ba-a535-ffa0f90820ec-utilities\") pod \"certified-operators-slk5m\" (UID: \"596cd905-0e7d-41ba-a535-ffa0f90820ec\") " pod="openshift-marketplace/certified-operators-slk5m" Feb 02 23:21:17 crc kubenswrapper[4755]: I0202 23:21:17.960217 4755 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-gv64f\" (UniqueName: \"kubernetes.io/projected/596cd905-0e7d-41ba-a535-ffa0f90820ec-kube-api-access-gv64f\") pod \"certified-operators-slk5m\" (UID: \"596cd905-0e7d-41ba-a535-ffa0f90820ec\") " pod="openshift-marketplace/certified-operators-slk5m" Feb 02 23:21:18 crc kubenswrapper[4755]: I0202 23:21:18.160908 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-slk5m" Feb 02 23:21:19 crc kubenswrapper[4755]: I0202 23:21:19.530476 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-slk5m"] Feb 02 23:21:20 crc kubenswrapper[4755]: I0202 23:21:20.056112 4755 generic.go:334] "Generic (PLEG): container finished" podID="596cd905-0e7d-41ba-a535-ffa0f90820ec" containerID="869786f574a7f08128e51cdfbced67de2252e106dc4c87b59e88c467ee851a06" exitCode=0 Feb 02 23:21:20 crc kubenswrapper[4755]: I0202 23:21:20.056274 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-slk5m" event={"ID":"596cd905-0e7d-41ba-a535-ffa0f90820ec","Type":"ContainerDied","Data":"869786f574a7f08128e51cdfbced67de2252e106dc4c87b59e88c467ee851a06"} Feb 02 23:21:20 crc kubenswrapper[4755]: I0202 23:21:20.056359 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-slk5m" event={"ID":"596cd905-0e7d-41ba-a535-ffa0f90820ec","Type":"ContainerStarted","Data":"404e5c9c8cb34ddd8f939bfa573fa13ef5086aa4ab1221d87678295d2c218a40"} Feb 02 23:21:26 crc kubenswrapper[4755]: I0202 23:21:26.119931 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-slk5m" event={"ID":"596cd905-0e7d-41ba-a535-ffa0f90820ec","Type":"ContainerStarted","Data":"646959f2ea7ff0eceb407cb84ec23053fadb10a03db76c4149e60fb91cd1b97a"} Feb 02 23:21:27 crc kubenswrapper[4755]: I0202 23:21:27.130533 4755 generic.go:334] "Generic (PLEG): container finished" podID="596cd905-0e7d-41ba-a535-ffa0f90820ec" containerID="646959f2ea7ff0eceb407cb84ec23053fadb10a03db76c4149e60fb91cd1b97a" exitCode=0 Feb 02 23:21:27 crc kubenswrapper[4755]: I0202 23:21:27.130648 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-slk5m" event={"ID":"596cd905-0e7d-41ba-a535-ffa0f90820ec","Type":"ContainerDied","Data":"646959f2ea7ff0eceb407cb84ec23053fadb10a03db76c4149e60fb91cd1b97a"} Feb 02 23:21:29 crc kubenswrapper[4755]: I0202 23:21:29.155519 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-slk5m" event={"ID":"596cd905-0e7d-41ba-a535-ffa0f90820ec","Type":"ContainerStarted","Data":"823c06b2fd5f5ca6f1115bb411634b715058163869dac2ddefc826ed062a4f11"} Feb 02 23:21:29 crc kubenswrapper[4755]: I0202 23:21:29.176104 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-slk5m" podStartSLOduration=4.429141843 podStartE2EDuration="12.176084384s" podCreationTimestamp="2026-02-02 23:21:17 +0000 UTC" firstStartedPulling="2026-02-02 23:21:20.058498475 +0000 UTC m=+2835.749718831" lastFinishedPulling="2026-02-02 23:21:27.805441046 +0000 UTC m=+2843.496661372" observedRunningTime="2026-02-02 23:21:29.17169213 +0000 UTC m=+2844.862912456" watchObservedRunningTime="2026-02-02 23:21:29.176084384 +0000 UTC m=+2844.867304710" Feb 02 23:21:31 crc kubenswrapper[4755]: I0202 23:21:31.178984 4755 generic.go:334] "Generic (PLEG): container finished" 
podID="ceeffcfc-e594-47f4-8bc9-b97f2464eed4" containerID="4a0f1945919975ff6e40d0f951a3543b91105ddd750aac2fe397708fae15d826" exitCode=0 Feb 02 23:21:31 crc kubenswrapper[4755]: I0202 23:21:31.179035 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vzkhl/crc-debug-sp8rt" event={"ID":"ceeffcfc-e594-47f4-8bc9-b97f2464eed4","Type":"ContainerDied","Data":"4a0f1945919975ff6e40d0f951a3543b91105ddd750aac2fe397708fae15d826"} Feb 02 23:21:32 crc kubenswrapper[4755]: I0202 23:21:32.325079 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-vzkhl/crc-debug-sp8rt" Feb 02 23:21:32 crc kubenswrapper[4755]: I0202 23:21:32.363337 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-vzkhl/crc-debug-sp8rt"] Feb 02 23:21:32 crc kubenswrapper[4755]: I0202 23:21:32.373632 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-vzkhl/crc-debug-sp8rt"] Feb 02 23:21:32 crc kubenswrapper[4755]: I0202 23:21:32.400492 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ceeffcfc-e594-47f4-8bc9-b97f2464eed4-host\") pod \"ceeffcfc-e594-47f4-8bc9-b97f2464eed4\" (UID: \"ceeffcfc-e594-47f4-8bc9-b97f2464eed4\") " Feb 02 23:21:32 crc kubenswrapper[4755]: I0202 23:21:32.400612 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ceeffcfc-e594-47f4-8bc9-b97f2464eed4-host" (OuterVolumeSpecName: "host") pod "ceeffcfc-e594-47f4-8bc9-b97f2464eed4" (UID: "ceeffcfc-e594-47f4-8bc9-b97f2464eed4"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 23:21:32 crc kubenswrapper[4755]: I0202 23:21:32.400666 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t8pxn\" (UniqueName: \"kubernetes.io/projected/ceeffcfc-e594-47f4-8bc9-b97f2464eed4-kube-api-access-t8pxn\") pod \"ceeffcfc-e594-47f4-8bc9-b97f2464eed4\" (UID: \"ceeffcfc-e594-47f4-8bc9-b97f2464eed4\") " Feb 02 23:21:32 crc kubenswrapper[4755]: I0202 23:21:32.401523 4755 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ceeffcfc-e594-47f4-8bc9-b97f2464eed4-host\") on node \"crc\" DevicePath \"\"" Feb 02 23:21:32 crc kubenswrapper[4755]: I0202 23:21:32.406414 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ceeffcfc-e594-47f4-8bc9-b97f2464eed4-kube-api-access-t8pxn" (OuterVolumeSpecName: "kube-api-access-t8pxn") pod "ceeffcfc-e594-47f4-8bc9-b97f2464eed4" (UID: "ceeffcfc-e594-47f4-8bc9-b97f2464eed4"). InnerVolumeSpecName "kube-api-access-t8pxn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:21:32 crc kubenswrapper[4755]: I0202 23:21:32.503837 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t8pxn\" (UniqueName: \"kubernetes.io/projected/ceeffcfc-e594-47f4-8bc9-b97f2464eed4-kube-api-access-t8pxn\") on node \"crc\" DevicePath \"\"" Feb 02 23:21:33 crc kubenswrapper[4755]: I0202 23:21:33.084597 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ceeffcfc-e594-47f4-8bc9-b97f2464eed4" path="/var/lib/kubelet/pods/ceeffcfc-e594-47f4-8bc9-b97f2464eed4/volumes" Feb 02 23:21:33 crc kubenswrapper[4755]: I0202 23:21:33.200904 4755 scope.go:117] "RemoveContainer" containerID="4a0f1945919975ff6e40d0f951a3543b91105ddd750aac2fe397708fae15d826" Feb 02 23:21:33 crc kubenswrapper[4755]: I0202 23:21:33.201021 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-vzkhl/crc-debug-sp8rt" Feb 02 23:21:33 crc kubenswrapper[4755]: I0202 23:21:33.588518 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-vzkhl/crc-debug-jwpgk"] Feb 02 23:21:33 crc kubenswrapper[4755]: E0202 23:21:33.589208 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ceeffcfc-e594-47f4-8bc9-b97f2464eed4" containerName="container-00" Feb 02 23:21:33 crc kubenswrapper[4755]: I0202 23:21:33.589231 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="ceeffcfc-e594-47f4-8bc9-b97f2464eed4" containerName="container-00" Feb 02 23:21:33 crc kubenswrapper[4755]: I0202 23:21:33.589584 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="ceeffcfc-e594-47f4-8bc9-b97f2464eed4" containerName="container-00" Feb 02 23:21:33 crc kubenswrapper[4755]: I0202 23:21:33.590665 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vzkhl/crc-debug-jwpgk" Feb 02 23:21:33 crc kubenswrapper[4755]: I0202 23:21:33.592575 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-vzkhl"/"default-dockercfg-cwq8h" Feb 02 23:21:33 crc kubenswrapper[4755]: I0202 23:21:33.626023 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c061ddea-3f10-4e51-8da7-2c4ebc556887-host\") pod \"crc-debug-jwpgk\" (UID: \"c061ddea-3f10-4e51-8da7-2c4ebc556887\") " pod="openshift-must-gather-vzkhl/crc-debug-jwpgk" Feb 02 23:21:33 crc kubenswrapper[4755]: I0202 23:21:33.626300 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsng5\" (UniqueName: \"kubernetes.io/projected/c061ddea-3f10-4e51-8da7-2c4ebc556887-kube-api-access-gsng5\") pod \"crc-debug-jwpgk\" (UID: \"c061ddea-3f10-4e51-8da7-2c4ebc556887\") " pod="openshift-must-gather-vzkhl/crc-debug-jwpgk" Feb 02 23:21:33 crc kubenswrapper[4755]: I0202 23:21:33.728248 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c061ddea-3f10-4e51-8da7-2c4ebc556887-host\") pod \"crc-debug-jwpgk\" (UID: \"c061ddea-3f10-4e51-8da7-2c4ebc556887\") " pod="openshift-must-gather-vzkhl/crc-debug-jwpgk" Feb 02 23:21:33 crc kubenswrapper[4755]: I0202 23:21:33.728634 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsng5\" (UniqueName: \"kubernetes.io/projected/c061ddea-3f10-4e51-8da7-2c4ebc556887-kube-api-access-gsng5\") pod \"crc-debug-jwpgk\" (UID: \"c061ddea-3f10-4e51-8da7-2c4ebc556887\") " pod="openshift-must-gather-vzkhl/crc-debug-jwpgk" Feb 02 23:21:33 crc kubenswrapper[4755]: I0202 23:21:33.728400 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c061ddea-3f10-4e51-8da7-2c4ebc556887-host\") pod \"crc-debug-jwpgk\" (UID: \"c061ddea-3f10-4e51-8da7-2c4ebc556887\") " pod="openshift-must-gather-vzkhl/crc-debug-jwpgk" Feb 02 23:21:33 crc kubenswrapper[4755]: I0202 23:21:33.758919 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsng5\" (UniqueName: \"kubernetes.io/projected/c061ddea-3f10-4e51-8da7-2c4ebc556887-kube-api-access-gsng5\") pod \"crc-debug-jwpgk\" (UID: \"c061ddea-3f10-4e51-8da7-2c4ebc556887\") " pod="openshift-must-gather-vzkhl/crc-debug-jwpgk" Feb 02 23:21:33 crc kubenswrapper[4755]: I0202 23:21:33.914655 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vzkhl/crc-debug-jwpgk" Feb 02 23:21:34 crc kubenswrapper[4755]: I0202 23:21:34.215110 4755 generic.go:334] "Generic (PLEG): container finished" podID="c061ddea-3f10-4e51-8da7-2c4ebc556887" containerID="8df61a7f5e55b83c49acf846a3d16a258bcc88636a1f0dcaf27a2ced1c133726" exitCode=1 Feb 02 23:21:34 crc kubenswrapper[4755]: I0202 23:21:34.215177 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vzkhl/crc-debug-jwpgk" event={"ID":"c061ddea-3f10-4e51-8da7-2c4ebc556887","Type":"ContainerDied","Data":"8df61a7f5e55b83c49acf846a3d16a258bcc88636a1f0dcaf27a2ced1c133726"} Feb 02 23:21:34 crc kubenswrapper[4755]: I0202 23:21:34.215759 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vzkhl/crc-debug-jwpgk" event={"ID":"c061ddea-3f10-4e51-8da7-2c4ebc556887","Type":"ContainerStarted","Data":"ce3be97c01cfc73357dfcfd9cacb855af9aca7d3535b910298e35d5d3727d9db"} Feb 02 23:21:34 crc kubenswrapper[4755]: I0202 23:21:34.253272 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-vzkhl/crc-debug-jwpgk"] Feb 02 23:21:34 crc kubenswrapper[4755]: I0202 23:21:34.262021 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-vzkhl/crc-debug-jwpgk"] Feb 02 23:21:35 crc kubenswrapper[4755]: I0202 23:21:35.332228 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-vzkhl/crc-debug-jwpgk" Feb 02 23:21:35 crc kubenswrapper[4755]: I0202 23:21:35.362113 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsng5\" (UniqueName: \"kubernetes.io/projected/c061ddea-3f10-4e51-8da7-2c4ebc556887-kube-api-access-gsng5\") pod \"c061ddea-3f10-4e51-8da7-2c4ebc556887\" (UID: \"c061ddea-3f10-4e51-8da7-2c4ebc556887\") " Feb 02 23:21:35 crc kubenswrapper[4755]: I0202 23:21:35.362505 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c061ddea-3f10-4e51-8da7-2c4ebc556887-host\") pod \"c061ddea-3f10-4e51-8da7-2c4ebc556887\" (UID: \"c061ddea-3f10-4e51-8da7-2c4ebc556887\") " Feb 02 23:21:35 crc kubenswrapper[4755]: I0202 23:21:35.362571 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c061ddea-3f10-4e51-8da7-2c4ebc556887-host" (OuterVolumeSpecName: "host") pod "c061ddea-3f10-4e51-8da7-2c4ebc556887" (UID: "c061ddea-3f10-4e51-8da7-2c4ebc556887"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 23:21:35 crc kubenswrapper[4755]: I0202 23:21:35.363093 4755 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c061ddea-3f10-4e51-8da7-2c4ebc556887-host\") on node \"crc\" DevicePath \"\"" Feb 02 23:21:35 crc kubenswrapper[4755]: I0202 23:21:35.369959 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c061ddea-3f10-4e51-8da7-2c4ebc556887-kube-api-access-gsng5" (OuterVolumeSpecName: "kube-api-access-gsng5") pod "c061ddea-3f10-4e51-8da7-2c4ebc556887" (UID: "c061ddea-3f10-4e51-8da7-2c4ebc556887"). InnerVolumeSpecName "kube-api-access-gsng5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:21:35 crc kubenswrapper[4755]: I0202 23:21:35.465143 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsng5\" (UniqueName: \"kubernetes.io/projected/c061ddea-3f10-4e51-8da7-2c4ebc556887-kube-api-access-gsng5\") on node \"crc\" DevicePath \"\"" Feb 02 23:21:36 crc kubenswrapper[4755]: I0202 23:21:36.239861 4755 scope.go:117] "RemoveContainer" containerID="8df61a7f5e55b83c49acf846a3d16a258bcc88636a1f0dcaf27a2ced1c133726" Feb 02 23:21:36 crc kubenswrapper[4755]: I0202 23:21:36.239886 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-vzkhl/crc-debug-jwpgk" Feb 02 23:21:37 crc kubenswrapper[4755]: I0202 23:21:37.080561 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c061ddea-3f10-4e51-8da7-2c4ebc556887" path="/var/lib/kubelet/pods/c061ddea-3f10-4e51-8da7-2c4ebc556887/volumes" Feb 02 23:21:38 crc kubenswrapper[4755]: I0202 23:21:38.162085 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-slk5m" Feb 02 23:21:38 crc kubenswrapper[4755]: I0202 23:21:38.162513 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-slk5m" Feb 02 23:21:38 crc kubenswrapper[4755]: I0202 23:21:38.228780 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-slk5m" Feb 02 23:21:38 crc kubenswrapper[4755]: I0202 23:21:38.321296 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-slk5m" Feb 02 23:21:38 crc kubenswrapper[4755]: I0202 23:21:38.409774 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-slk5m"] Feb 02 23:21:38 crc kubenswrapper[4755]: I0202 23:21:38.478623 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gwz27"] Feb 02 23:21:38 crc kubenswrapper[4755]: I0202 23:21:38.478921 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gwz27" podUID="31e5d7ec-069a-4def-b27c-8c8418f98ce4" containerName="registry-server" containerID="cri-o://c4519b0b1f6f9f1eb5afda4a665235b7f3e53c39f9bee73e08be86795347f172" gracePeriod=2 Feb 02 23:21:38 crc kubenswrapper[4755]: I0202 23:21:38.953508 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gwz27" Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.049866 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31e5d7ec-069a-4def-b27c-8c8418f98ce4-utilities\") pod \"31e5d7ec-069a-4def-b27c-8c8418f98ce4\" (UID: \"31e5d7ec-069a-4def-b27c-8c8418f98ce4\") " Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.050034 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31e5d7ec-069a-4def-b27c-8c8418f98ce4-catalog-content\") pod \"31e5d7ec-069a-4def-b27c-8c8418f98ce4\" (UID: \"31e5d7ec-069a-4def-b27c-8c8418f98ce4\") " Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.050084 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nxtrm\" (UniqueName: \"kubernetes.io/projected/31e5d7ec-069a-4def-b27c-8c8418f98ce4-kube-api-access-nxtrm\") pod \"31e5d7ec-069a-4def-b27c-8c8418f98ce4\" (UID: \"31e5d7ec-069a-4def-b27c-8c8418f98ce4\") " Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.051668 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31e5d7ec-069a-4def-b27c-8c8418f98ce4-utilities" (OuterVolumeSpecName: "utilities") pod "31e5d7ec-069a-4def-b27c-8c8418f98ce4" (UID: "31e5d7ec-069a-4def-b27c-8c8418f98ce4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.058940 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31e5d7ec-069a-4def-b27c-8c8418f98ce4-kube-api-access-nxtrm" (OuterVolumeSpecName: "kube-api-access-nxtrm") pod "31e5d7ec-069a-4def-b27c-8c8418f98ce4" (UID: "31e5d7ec-069a-4def-b27c-8c8418f98ce4"). InnerVolumeSpecName "kube-api-access-nxtrm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.124552 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31e5d7ec-069a-4def-b27c-8c8418f98ce4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "31e5d7ec-069a-4def-b27c-8c8418f98ce4" (UID: "31e5d7ec-069a-4def-b27c-8c8418f98ce4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.152195 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31e5d7ec-069a-4def-b27c-8c8418f98ce4-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.152229 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31e5d7ec-069a-4def-b27c-8c8418f98ce4-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.152240 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nxtrm\" (UniqueName: \"kubernetes.io/projected/31e5d7ec-069a-4def-b27c-8c8418f98ce4-kube-api-access-nxtrm\") on node \"crc\" DevicePath \"\"" Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.276847 4755 generic.go:334] "Generic (PLEG): container finished" podID="31e5d7ec-069a-4def-b27c-8c8418f98ce4" containerID="c4519b0b1f6f9f1eb5afda4a665235b7f3e53c39f9bee73e08be86795347f172" exitCode=0 Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.276979 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gwz27" Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.282784 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gwz27" event={"ID":"31e5d7ec-069a-4def-b27c-8c8418f98ce4","Type":"ContainerDied","Data":"c4519b0b1f6f9f1eb5afda4a665235b7f3e53c39f9bee73e08be86795347f172"} Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.282835 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gwz27" event={"ID":"31e5d7ec-069a-4def-b27c-8c8418f98ce4","Type":"ContainerDied","Data":"c9e9a797da9a958d65b9097394cee387937ac824b5462365beb8307ce3576dfa"} Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.282853 4755 scope.go:117] "RemoveContainer" containerID="c4519b0b1f6f9f1eb5afda4a665235b7f3e53c39f9bee73e08be86795347f172" Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.316680 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gwz27"] Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.330574 4755 scope.go:117] "RemoveContainer" containerID="0d8dfeb813704fd0e6058fc2ff777f35d4a28267b60cfebd8f343c88658b235d" Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.337872 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gwz27"] Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.362553 4755 scope.go:117] "RemoveContainer" containerID="0fe61b2f4856e7fbbc79b0a66c88250c1554907769b6679f3e9ddfe81298f435" Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.401050 4755 scope.go:117] "RemoveContainer" containerID="c4519b0b1f6f9f1eb5afda4a665235b7f3e53c39f9bee73e08be86795347f172" Feb 02 23:21:39 crc kubenswrapper[4755]: E0202 23:21:39.401570 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4519b0b1f6f9f1eb5afda4a665235b7f3e53c39f9bee73e08be86795347f172\": container with ID starting with c4519b0b1f6f9f1eb5afda4a665235b7f3e53c39f9bee73e08be86795347f172 not found: ID does not exist" containerID="c4519b0b1f6f9f1eb5afda4a665235b7f3e53c39f9bee73e08be86795347f172" Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.401624 
4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4519b0b1f6f9f1eb5afda4a665235b7f3e53c39f9bee73e08be86795347f172"} err="failed to get container status \"c4519b0b1f6f9f1eb5afda4a665235b7f3e53c39f9bee73e08be86795347f172\": rpc error: code = NotFound desc = could not find container \"c4519b0b1f6f9f1eb5afda4a665235b7f3e53c39f9bee73e08be86795347f172\": container with ID starting with c4519b0b1f6f9f1eb5afda4a665235b7f3e53c39f9bee73e08be86795347f172 not found: ID does not exist" Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.401658 4755 scope.go:117] "RemoveContainer" containerID="0d8dfeb813704fd0e6058fc2ff777f35d4a28267b60cfebd8f343c88658b235d" Feb 02 23:21:39 crc kubenswrapper[4755]: E0202 23:21:39.402124 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d8dfeb813704fd0e6058fc2ff777f35d4a28267b60cfebd8f343c88658b235d\": container with ID starting with 0d8dfeb813704fd0e6058fc2ff777f35d4a28267b60cfebd8f343c88658b235d not found: ID does not exist" containerID="0d8dfeb813704fd0e6058fc2ff777f35d4a28267b60cfebd8f343c88658b235d" Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.402223 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d8dfeb813704fd0e6058fc2ff777f35d4a28267b60cfebd8f343c88658b235d"} err="failed to get container status \"0d8dfeb813704fd0e6058fc2ff777f35d4a28267b60cfebd8f343c88658b235d\": rpc error: code = NotFound desc = could not find container \"0d8dfeb813704fd0e6058fc2ff777f35d4a28267b60cfebd8f343c88658b235d\": container with ID starting with 0d8dfeb813704fd0e6058fc2ff777f35d4a28267b60cfebd8f343c88658b235d not found: ID does not exist" Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.402311 4755 scope.go:117] "RemoveContainer" containerID="0fe61b2f4856e7fbbc79b0a66c88250c1554907769b6679f3e9ddfe81298f435" Feb 02 23:21:39 crc kubenswrapper[4755]: E0202 23:21:39.402938 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0fe61b2f4856e7fbbc79b0a66c88250c1554907769b6679f3e9ddfe81298f435\": container with ID starting with 0fe61b2f4856e7fbbc79b0a66c88250c1554907769b6679f3e9ddfe81298f435 not found: ID does not exist" containerID="0fe61b2f4856e7fbbc79b0a66c88250c1554907769b6679f3e9ddfe81298f435" Feb 02 23:21:39 crc kubenswrapper[4755]: I0202 23:21:39.402977 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0fe61b2f4856e7fbbc79b0a66c88250c1554907769b6679f3e9ddfe81298f435"} err="failed to get container status \"0fe61b2f4856e7fbbc79b0a66c88250c1554907769b6679f3e9ddfe81298f435\": rpc error: code = NotFound desc = could not find container \"0fe61b2f4856e7fbbc79b0a66c88250c1554907769b6679f3e9ddfe81298f435\": container with ID starting with 0fe61b2f4856e7fbbc79b0a66c88250c1554907769b6679f3e9ddfe81298f435 not found: ID does not exist" Feb 02 23:21:41 crc kubenswrapper[4755]: I0202 23:21:41.082390 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31e5d7ec-069a-4def-b27c-8c8418f98ce4" path="/var/lib/kubelet/pods/31e5d7ec-069a-4def-b27c-8c8418f98ce4/volumes" Feb 02 23:22:23 crc kubenswrapper[4755]: I0202 23:22:23.389454 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 23:22:23 crc kubenswrapper[4755]: I0202 23:22:23.390203 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 23:22:26 crc kubenswrapper[4755]: I0202 23:22:26.848198 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_9664cb92-62aa-4d52-9936-96c48dc7c8d2/init-config-reloader/0.log" Feb 02 23:22:27 crc kubenswrapper[4755]: I0202 23:22:27.088125 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_9664cb92-62aa-4d52-9936-96c48dc7c8d2/alertmanager/0.log" Feb 02 23:22:27 crc kubenswrapper[4755]: I0202 23:22:27.096773 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_9664cb92-62aa-4d52-9936-96c48dc7c8d2/config-reloader/0.log" Feb 02 23:22:27 crc kubenswrapper[4755]: I0202 23:22:27.119752 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_9664cb92-62aa-4d52-9936-96c48dc7c8d2/init-config-reloader/0.log" Feb 02 23:22:27 crc kubenswrapper[4755]: I0202 23:22:27.257334 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-75f5dc8786-9gzp2_ff86278c-f2b5-405a-a79c-f192d8aba1d5/barbican-api/0.log" Feb 02 23:22:27 crc kubenswrapper[4755]: I0202 23:22:27.260294 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-75f5dc8786-9gzp2_ff86278c-f2b5-405a-a79c-f192d8aba1d5/barbican-api-log/0.log" Feb 02 23:22:27 crc kubenswrapper[4755]: I0202 23:22:27.348838 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-58cb968fcd-5t5jk_3838c6a5-bf59-4820-a400-0877e82598f6/barbican-keystone-listener/0.log" Feb 02 23:22:27 crc kubenswrapper[4755]: I0202 23:22:27.478517 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-58cb968fcd-5t5jk_3838c6a5-bf59-4820-a400-0877e82598f6/barbican-keystone-listener-log/0.log" Feb 02 23:22:27 crc kubenswrapper[4755]: I0202 23:22:27.565965 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7cbd488799-dtxcz_11097ba4-a9db-49eb-a685-fbb2a15e6d6a/barbican-worker/0.log" Feb 02 23:22:27 crc kubenswrapper[4755]: I0202 23:22:27.617592 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7cbd488799-dtxcz_11097ba4-a9db-49eb-a685-fbb2a15e6d6a/barbican-worker-log/0.log" Feb 02 23:22:27 crc kubenswrapper[4755]: I0202 23:22:27.766608 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-kr2sj_3501887a-bec4-43bd-b0ed-1daf65ae1331/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 23:22:27 crc kubenswrapper[4755]: I0202 23:22:27.837047 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3f2efd24-009b-4ad3-a07f-7d1d583e4bee/ceilometer-central-agent/0.log" Feb 02 23:22:27 crc kubenswrapper[4755]: I0202 23:22:27.951450 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3f2efd24-009b-4ad3-a07f-7d1d583e4bee/ceilometer-notification-agent/0.log" Feb 02 23:22:28 crc kubenswrapper[4755]: 
I0202 23:22:28.036127 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3f2efd24-009b-4ad3-a07f-7d1d583e4bee/proxy-httpd/0.log" Feb 02 23:22:28 crc kubenswrapper[4755]: I0202 23:22:28.099494 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3f2efd24-009b-4ad3-a07f-7d1d583e4bee/sg-core/0.log" Feb 02 23:22:28 crc kubenswrapper[4755]: I0202 23:22:28.167058 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_8df73ba5-ff8c-49a8-a923-2e7c957fb043/cinder-api/0.log" Feb 02 23:22:28 crc kubenswrapper[4755]: I0202 23:22:28.222079 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_8df73ba5-ff8c-49a8-a923-2e7c957fb043/cinder-api-log/0.log" Feb 02 23:22:28 crc kubenswrapper[4755]: I0202 23:22:28.333928 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_35c710b6-d258-4242-99f7-c1f3216cfc0c/cinder-scheduler/0.log" Feb 02 23:22:28 crc kubenswrapper[4755]: I0202 23:22:28.385763 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_35c710b6-d258-4242-99f7-c1f3216cfc0c/probe/0.log" Feb 02 23:22:28 crc kubenswrapper[4755]: I0202 23:22:28.576874 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_f8c6396a-eb16-427b-8b30-07e7ad4d0415/cloudkitty-api-log/0.log" Feb 02 23:22:28 crc kubenswrapper[4755]: I0202 23:22:28.632437 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_f8c6396a-eb16-427b-8b30-07e7ad4d0415/cloudkitty-api/0.log" Feb 02 23:22:28 crc kubenswrapper[4755]: I0202 23:22:28.677327 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-compactor-0_1092ca47-2068-4bc1-9e92-a085dc5eac3a/loki-compactor/0.log" Feb 02 23:22:28 crc kubenswrapper[4755]: I0202 23:22:28.817442 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-distributor-66dfd9bb-99jqt_c6c5284b-8717-4e3a-bfc6-73fcadc8303d/loki-distributor/0.log" Feb 02 23:22:28 crc kubenswrapper[4755]: I0202 23:22:28.882166 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-7db4f4db8c-9jzvh_957644f6-ace8-4ceb-88b6-7a2228097714/gateway/0.log" Feb 02 23:22:29 crc kubenswrapper[4755]: I0202 23:22:29.050082 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-7db4f4db8c-kphqd_ddc85ceb-9be7-45ae-8a7b-fd1aabfed85a/gateway/0.log" Feb 02 23:22:29 crc kubenswrapper[4755]: I0202 23:22:29.071126 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-index-gateway-0_f9a55300-7a89-493a-9001-1b77e3b64530/loki-index-gateway/0.log" Feb 02 23:22:29 crc kubenswrapper[4755]: I0202 23:22:29.422584 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-ingester-0_5717cdb5-a227-4975-b808-068f0ace63c5/loki-ingester/0.log" Feb 02 23:22:29 crc kubenswrapper[4755]: I0202 23:22:29.538878 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-querier-795fd8f8cc-gc84f_bd97d545-a2b9-47ea-a0cb-564ef5ac59d3/loki-querier/0.log" Feb 02 23:22:29 crc kubenswrapper[4755]: I0202 23:22:29.711232 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-query-frontend-5cd44666df-mhst9_654caa39-8ecb-43ef-b132-aca5a922fd0f/loki-query-frontend/0.log" Feb 02 23:22:29 
crc kubenswrapper[4755]: I0202 23:22:29.973914 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-4jbf4_b68ca425-9be7-41e0-b1c3-5566cb559d71/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 23:22:30 crc kubenswrapper[4755]: I0202 23:22:30.285243 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-p7jpq_21f9cef1-eabf-447e-abbb-a8c7d5627994/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 23:22:30 crc kubenswrapper[4755]: I0202 23:22:30.431433 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-c4b758ff5-78spt_1b8a733b-079b-4c38-90c3-6136137f4d40/init/0.log" Feb 02 23:22:30 crc kubenswrapper[4755]: I0202 23:22:30.550798 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-c4b758ff5-78spt_1b8a733b-079b-4c38-90c3-6136137f4d40/init/0.log" Feb 02 23:22:30 crc kubenswrapper[4755]: I0202 23:22:30.594775 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-c4b758ff5-78spt_1b8a733b-079b-4c38-90c3-6136137f4d40/dnsmasq-dns/0.log" Feb 02 23:22:30 crc kubenswrapper[4755]: I0202 23:22:30.726654 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-h4969_1010f2d3-0d67-4d01-8e07-f95412d03443/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 23:22:30 crc kubenswrapper[4755]: I0202 23:22:30.835625 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_97769578-d73a-448d-a806-3296baae6447/glance-log/0.log" Feb 02 23:22:30 crc kubenswrapper[4755]: I0202 23:22:30.839321 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_97769578-d73a-448d-a806-3296baae6447/glance-httpd/0.log" Feb 02 23:22:31 crc kubenswrapper[4755]: I0202 23:22:31.052044 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_5c76c588-a947-484d-88e4-4fe526e1ffb4/glance-log/0.log" Feb 02 23:22:31 crc kubenswrapper[4755]: I0202 23:22:31.060637 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_5c76c588-a947-484d-88e4-4fe526e1ffb4/glance-httpd/0.log" Feb 02 23:22:31 crc kubenswrapper[4755]: I0202 23:22:31.312128 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-hfjfq_50b91f5b-54be-49cb-bfff-e2d317db916e/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 23:22:31 crc kubenswrapper[4755]: I0202 23:22:31.413581 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-szhnq_3fffdbfb-f7b8-46c3-93ad-24ec8fb44ddc/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 23:22:31 crc kubenswrapper[4755]: I0202 23:22:31.636448 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29501221-9nvdx_aae14811-9ad0-4c73-b154-10736e7504c6/keystone-cron/0.log" Feb 02 23:22:31 crc kubenswrapper[4755]: I0202 23:22:31.676480 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-7cb7f594d6-76xlv_9ba7348e-74b2-4840-8d02-8bfa3c89c483/keystone-api/0.log" Feb 02 23:22:31 crc kubenswrapper[4755]: I0202 23:22:31.868703 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_kube-state-metrics-0_13cf3041-fd62-4742-a4ea-73a9c6817f51/kube-state-metrics/0.log" Feb 02 23:22:32 crc kubenswrapper[4755]: I0202 23:22:32.139845 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-gncrz_b7b416e0-c078-47ba-97e8-f7c16294e8e5/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 23:22:32 crc kubenswrapper[4755]: I0202 23:22:32.381375 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-d7cb5dffc-4r8bd_0b81f473-96f7-4d5c-9695-cac22c344ed5/neutron-api/0.log" Feb 02 23:22:32 crc kubenswrapper[4755]: I0202 23:22:32.467339 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-d7cb5dffc-4r8bd_0b81f473-96f7-4d5c-9695-cac22c344ed5/neutron-httpd/0.log" Feb 02 23:22:32 crc kubenswrapper[4755]: I0202 23:22:32.616459 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-svr7n_fb61c726-e017-45d6-a3d2-883f93e04eb8/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 23:22:33 crc kubenswrapper[4755]: I0202 23:22:33.026765 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_f2835e13-0c5c-4211-935d-0b3bd14a5aca/nova-api-log/0.log" Feb 02 23:22:33 crc kubenswrapper[4755]: I0202 23:22:33.138393 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_f2835e13-0c5c-4211-935d-0b3bd14a5aca/nova-api-api/0.log" Feb 02 23:22:33 crc kubenswrapper[4755]: I0202 23:22:33.255015 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_e3895493-b282-4909-ad12-f4d6171695f2/nova-cell0-conductor-conductor/0.log" Feb 02 23:22:33 crc kubenswrapper[4755]: I0202 23:22:33.492800 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_31b3bd9c-82ad-47a5-9275-cb6b3ea02256/nova-cell1-conductor-conductor/0.log" Feb 02 23:22:33 crc kubenswrapper[4755]: I0202 23:22:33.575652 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_c4ed5cb5-496b-4d30-9134-6fe50ebe4759/nova-cell1-novncproxy-novncproxy/0.log" Feb 02 23:22:33 crc kubenswrapper[4755]: I0202 23:22:33.824006 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-pp6rs_57c65d38-e362-4fcb-80e6-8e1881c990f1/nova-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 23:22:34 crc kubenswrapper[4755]: I0202 23:22:34.062081 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_140c16ab-7ad3-45f6-8e89-edad569ee119/nova-metadata-log/0.log" Feb 02 23:22:34 crc kubenswrapper[4755]: I0202 23:22:34.345958 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-proc-0_48f9ab6a-cee4-4fd9-b5b4-52948ebc0ac5/cloudkitty-proc/0.log" Feb 02 23:22:34 crc kubenswrapper[4755]: I0202 23:22:34.404184 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_63990686-ba43-4ee1-8e18-6855cccae33b/nova-scheduler-scheduler/0.log" Feb 02 23:22:34 crc kubenswrapper[4755]: I0202 23:22:34.584851 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_91145094-ac53-469f-9ac1-e10732802d35/mysql-bootstrap/0.log" Feb 02 23:22:34 crc kubenswrapper[4755]: I0202 23:22:34.736676 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-cell1-galera-0_91145094-ac53-469f-9ac1-e10732802d35/mysql-bootstrap/0.log" Feb 02 23:22:34 crc kubenswrapper[4755]: I0202 23:22:34.844448 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_140c16ab-7ad3-45f6-8e89-edad569ee119/nova-metadata-metadata/0.log" Feb 02 23:22:34 crc kubenswrapper[4755]: I0202 23:22:34.958656 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_91145094-ac53-469f-9ac1-e10732802d35/galera/0.log" Feb 02 23:22:35 crc kubenswrapper[4755]: I0202 23:22:35.178205 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0/mysql-bootstrap/0.log" Feb 02 23:22:35 crc kubenswrapper[4755]: I0202 23:22:35.332568 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0/mysql-bootstrap/0.log" Feb 02 23:22:35 crc kubenswrapper[4755]: I0202 23:22:35.389803 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_3dd6aa11-4a8d-460c-936d-48d1d8ecb1e0/galera/0.log" Feb 02 23:22:35 crc kubenswrapper[4755]: I0202 23:22:35.392132 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_e160c11e-8b86-4837-9a86-bd2eb97f94d9/openstackclient/0.log" Feb 02 23:22:35 crc kubenswrapper[4755]: I0202 23:22:35.591199 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-tpnqc_0641bc0d-aae7-45cf-b590-cbe9abe2c99e/openstack-network-exporter/0.log" Feb 02 23:22:35 crc kubenswrapper[4755]: I0202 23:22:35.659365 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-jdd7m_06016f18-0f29-4d82-aa08-233d91c9a744/ovn-controller/0.log" Feb 02 23:22:35 crc kubenswrapper[4755]: I0202 23:22:35.939990 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bzn9p_cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0/ovsdb-server-init/0.log" Feb 02 23:22:36 crc kubenswrapper[4755]: I0202 23:22:36.074465 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bzn9p_cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0/ovsdb-server-init/0.log" Feb 02 23:22:36 crc kubenswrapper[4755]: I0202 23:22:36.076216 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bzn9p_cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0/ovs-vswitchd/0.log" Feb 02 23:22:36 crc kubenswrapper[4755]: I0202 23:22:36.088751 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bzn9p_cf1e2dcd-1dcd-44be-ae0f-e84fb7ce15f0/ovsdb-server/0.log" Feb 02 23:22:36 crc kubenswrapper[4755]: I0202 23:22:36.320552 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_03b1c517-cae2-45bd-a887-fe41ad462721/openstack-network-exporter/0.log" Feb 02 23:22:36 crc kubenswrapper[4755]: I0202 23:22:36.390658 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-z6lpt_d9ce0f10-bc90-4f02-8c98-0b1e054c026f/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 23:22:36 crc kubenswrapper[4755]: I0202 23:22:36.472151 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_03b1c517-cae2-45bd-a887-fe41ad462721/ovn-northd/0.log" Feb 02 23:22:36 crc kubenswrapper[4755]: I0202 23:22:36.618184 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-nb-0_47898ca9-c1e8-4b61-9e3c-701743aff784/ovsdbserver-nb/0.log" Feb 02 23:22:36 crc kubenswrapper[4755]: I0202 23:22:36.631971 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_47898ca9-c1e8-4b61-9e3c-701743aff784/openstack-network-exporter/0.log" Feb 02 23:22:36 crc kubenswrapper[4755]: I0202 23:22:36.871242 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_caf81ad2-83b7-4930-a67e-0c8dce4690ad/ovsdbserver-sb/0.log" Feb 02 23:22:36 crc kubenswrapper[4755]: I0202 23:22:36.906039 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_caf81ad2-83b7-4930-a67e-0c8dce4690ad/openstack-network-exporter/0.log" Feb 02 23:22:37 crc kubenswrapper[4755]: I0202 23:22:37.048559 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-f565458cd-6bkv6_541bac40-f0e1-4d39-9595-447b0f5b0c26/placement-api/0.log" Feb 02 23:22:37 crc kubenswrapper[4755]: I0202 23:22:37.197544 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_56e97e05-60e4-4c71-b081-18bb5dde670b/init-config-reloader/0.log" Feb 02 23:22:37 crc kubenswrapper[4755]: I0202 23:22:37.247313 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-f565458cd-6bkv6_541bac40-f0e1-4d39-9595-447b0f5b0c26/placement-log/0.log" Feb 02 23:22:37 crc kubenswrapper[4755]: I0202 23:22:37.377275 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_56e97e05-60e4-4c71-b081-18bb5dde670b/init-config-reloader/0.log" Feb 02 23:22:37 crc kubenswrapper[4755]: I0202 23:22:37.401457 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_56e97e05-60e4-4c71-b081-18bb5dde670b/config-reloader/0.log" Feb 02 23:22:37 crc kubenswrapper[4755]: I0202 23:22:37.408177 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_56e97e05-60e4-4c71-b081-18bb5dde670b/prometheus/0.log" Feb 02 23:22:37 crc kubenswrapper[4755]: I0202 23:22:37.435670 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_56e97e05-60e4-4c71-b081-18bb5dde670b/thanos-sidecar/0.log" Feb 02 23:22:37 crc kubenswrapper[4755]: I0202 23:22:37.668111 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e09fc49d-8b5d-4775-a549-b5ca23d3d13e/setup-container/0.log" Feb 02 23:22:37 crc kubenswrapper[4755]: I0202 23:22:37.789959 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e09fc49d-8b5d-4775-a549-b5ca23d3d13e/rabbitmq/0.log" Feb 02 23:22:37 crc kubenswrapper[4755]: I0202 23:22:37.838955 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e09fc49d-8b5d-4775-a549-b5ca23d3d13e/setup-container/0.log" Feb 02 23:22:37 crc kubenswrapper[4755]: I0202 23:22:37.897173 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_a4a28287-a8ee-439b-a1af-927b8819a6ae/setup-container/0.log" Feb 02 23:22:38 crc kubenswrapper[4755]: I0202 23:22:38.063088 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_a4a28287-a8ee-439b-a1af-927b8819a6ae/setup-container/0.log" Feb 02 23:22:38 crc kubenswrapper[4755]: I0202 23:22:38.078953 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_rabbitmq-server-0_a4a28287-a8ee-439b-a1af-927b8819a6ae/rabbitmq/0.log" Feb 02 23:22:38 crc kubenswrapper[4755]: I0202 23:22:38.342943 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-8njm7_f7fc429c-671b-4085-ab07-8f4b2c53f496/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 23:22:38 crc kubenswrapper[4755]: I0202 23:22:38.484573 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-cdlnd_bb72c052-d5a9-4448-af05-1396fd408383/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 23:22:38 crc kubenswrapper[4755]: I0202 23:22:38.552787 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-5jjg9_ebfacc8a-aef0-4478-bd5f-5285e424fa0b/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 23:22:38 crc kubenswrapper[4755]: I0202 23:22:38.755595 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-vvwd4_1c1128ff-2549-455b-bee1-186751b6966b/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 23:22:38 crc kubenswrapper[4755]: I0202 23:22:38.820612 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-l6kfd_42d13e43-ccef-4df2-a2eb-f7b8d5d58ac0/ssh-known-hosts-edpm-deployment/0.log" Feb 02 23:22:39 crc kubenswrapper[4755]: I0202 23:22:39.003403 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-7857dcf8cf-dgghm_2fc39798-6917-40f3-badb-2633a9b0f37a/proxy-server/0.log" Feb 02 23:22:39 crc kubenswrapper[4755]: I0202 23:22:39.073604 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-7857dcf8cf-dgghm_2fc39798-6917-40f3-badb-2633a9b0f37a/proxy-httpd/0.log" Feb 02 23:22:39 crc kubenswrapper[4755]: I0202 23:22:39.198130 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-g6jdb_980ec437-3885-426d-9b2c-1773951f8c86/swift-ring-rebalance/0.log" Feb 02 23:22:39 crc kubenswrapper[4755]: I0202 23:22:39.287264 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_50a165a2-aeeb-4f83-9af3-a33f76b34a39/account-reaper/0.log" Feb 02 23:22:39 crc kubenswrapper[4755]: I0202 23:22:39.303353 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_50a165a2-aeeb-4f83-9af3-a33f76b34a39/account-auditor/0.log" Feb 02 23:22:39 crc kubenswrapper[4755]: I0202 23:22:39.419269 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_50a165a2-aeeb-4f83-9af3-a33f76b34a39/account-replicator/0.log" Feb 02 23:22:39 crc kubenswrapper[4755]: I0202 23:22:39.447400 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_50a165a2-aeeb-4f83-9af3-a33f76b34a39/account-server/0.log" Feb 02 23:22:39 crc kubenswrapper[4755]: I0202 23:22:39.551284 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_50a165a2-aeeb-4f83-9af3-a33f76b34a39/container-auditor/0.log" Feb 02 23:22:39 crc kubenswrapper[4755]: I0202 23:22:39.562882 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_50a165a2-aeeb-4f83-9af3-a33f76b34a39/container-replicator/0.log" Feb 02 23:22:39 crc kubenswrapper[4755]: I0202 23:22:39.623697 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_50a165a2-aeeb-4f83-9af3-a33f76b34a39/container-server/0.log" Feb 02 23:22:39 crc kubenswrapper[4755]: I0202 23:22:39.716146 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_50a165a2-aeeb-4f83-9af3-a33f76b34a39/container-updater/0.log" Feb 02 23:22:39 crc kubenswrapper[4755]: I0202 23:22:39.787820 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_50a165a2-aeeb-4f83-9af3-a33f76b34a39/object-expirer/0.log" Feb 02 23:22:39 crc kubenswrapper[4755]: I0202 23:22:39.798214 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_50a165a2-aeeb-4f83-9af3-a33f76b34a39/object-auditor/0.log" Feb 02 23:22:39 crc kubenswrapper[4755]: I0202 23:22:39.921000 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_50a165a2-aeeb-4f83-9af3-a33f76b34a39/object-replicator/0.log" Feb 02 23:22:39 crc kubenswrapper[4755]: I0202 23:22:39.941992 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_50a165a2-aeeb-4f83-9af3-a33f76b34a39/object-server/0.log" Feb 02 23:22:40 crc kubenswrapper[4755]: I0202 23:22:40.086581 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_50a165a2-aeeb-4f83-9af3-a33f76b34a39/rsync/0.log" Feb 02 23:22:40 crc kubenswrapper[4755]: I0202 23:22:40.123545 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_50a165a2-aeeb-4f83-9af3-a33f76b34a39/object-updater/0.log" Feb 02 23:22:40 crc kubenswrapper[4755]: I0202 23:22:40.203961 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_50a165a2-aeeb-4f83-9af3-a33f76b34a39/swift-recon-cron/0.log" Feb 02 23:22:40 crc kubenswrapper[4755]: I0202 23:22:40.421158 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-f6v85_ea9bda0f-9dcd-4175-940e-023f11ef0f44/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 23:22:40 crc kubenswrapper[4755]: I0202 23:22:40.436086 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-q9pgk_811d66f3-04e9-4de2-8509-0c9f409addee/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Feb 02 23:22:46 crc kubenswrapper[4755]: I0202 23:22:46.265614 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_2ebaf1c7-94a5-47df-abbf-5ac70251c816/memcached/0.log" Feb 02 23:22:53 crc kubenswrapper[4755]: I0202 23:22:53.389112 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 23:22:53 crc kubenswrapper[4755]: I0202 23:22:53.389403 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 23:23:06 crc kubenswrapper[4755]: I0202 23:23:06.434839 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn_137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b/util/0.log" Feb 02 23:23:06 crc kubenswrapper[4755]: I0202 23:23:06.575085 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn_137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b/util/0.log" Feb 02 23:23:06 crc kubenswrapper[4755]: I0202 23:23:06.619465 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn_137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b/pull/0.log" Feb 02 23:23:06 crc kubenswrapper[4755]: I0202 23:23:06.666132 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn_137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b/pull/0.log" Feb 02 23:23:06 crc kubenswrapper[4755]: I0202 23:23:06.825690 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn_137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b/pull/0.log" Feb 02 23:23:06 crc kubenswrapper[4755]: I0202 23:23:06.825841 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn_137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b/extract/0.log" Feb 02 23:23:06 crc kubenswrapper[4755]: I0202 23:23:06.855779 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_96fa1d457cffabaf71a3129631753b389eaf99a63a24ba80c3781ab5aangdnn_137c5f37-a2a1-41d2-90bc-c3e33a5a8e1b/util/0.log" Feb 02 23:23:07 crc kubenswrapper[4755]: I0202 23:23:07.060396 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-8d874c8fc-gfmgq_f8a6ce9f-fde2-4696-9302-7edb0a04d233/manager/0.log" Feb 02 23:23:07 crc kubenswrapper[4755]: I0202 23:23:07.135003 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7b6c4d8c5f-lpnrn_49a76ede-9115-4a09-b344-f7e130018c83/manager/0.log" Feb 02 23:23:07 crc kubenswrapper[4755]: I0202 23:23:07.216702 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-6d9697b7f4-sgwm7_62edcea1-a12a-428b-bec6-d5c14bcb2d9d/manager/0.log" Feb 02 23:23:07 crc kubenswrapper[4755]: I0202 23:23:07.361599 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-8886f4c47-4br2h_06cf134b-94e8-4945-b882-bc54dd5c5045/manager/0.log" Feb 02 23:23:07 crc kubenswrapper[4755]: I0202 23:23:07.443913 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-69d6db494d-lz7b7_cc7aea9e-48ae-4d78-835e-3516d8bdd1e0/manager/0.log" Feb 02 23:23:07 crc kubenswrapper[4755]: I0202 23:23:07.521274 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-5fb775575f-hvd6r_7cdb62bb-2e9f-43c8-a6ac-5e05577fb7bd/manager/0.log" Feb 02 23:23:07 crc kubenswrapper[4755]: I0202 23:23:07.768530 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5f4b8bd54d-hlqln_4bf6953c-28a8-49f6-b850-d6572decd288/manager/0.log" Feb 02 23:23:07 crc kubenswrapper[4755]: I0202 23:23:07.898706 4755 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-79955696d6-zh58s_44ef88e4-d62d-4f16-ab3c-15b7136ac5c9/manager/0.log" Feb 02 23:23:07 crc kubenswrapper[4755]: I0202 23:23:07.972554 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-84f48565d4-rj689_62f03e5d-4b33-46dc-b74f-c5b2f19e8d7b/manager/0.log" Feb 02 23:23:08 crc kubenswrapper[4755]: I0202 23:23:08.116958 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7dd968899f-f8k9j_67dc65c3-ffb5-4139-b405-87a180ddb551/manager/0.log" Feb 02 23:23:08 crc kubenswrapper[4755]: I0202 23:23:08.175902 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67bf948998-h69gk_c89c93bd-725d-4cb4-9464-22674774af64/manager/0.log" Feb 02 23:23:08 crc kubenswrapper[4755]: I0202 23:23:08.308369 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-585dbc889-644bb_e9675ae3-4e81-4adb-85a0-cd21ac496df2/manager/0.log" Feb 02 23:23:08 crc kubenswrapper[4755]: I0202 23:23:08.448719 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-55bff696bd-lm5vg_afe1c6d0-666e-46f4-93f9-a814399d699b/manager/0.log" Feb 02 23:23:08 crc kubenswrapper[4755]: I0202 23:23:08.481853 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6687f8d877-dwrws_1734ceac-dd78-4d4b-986c-5a3c27c3c48f/manager/0.log" Feb 02 23:23:08 crc kubenswrapper[4755]: I0202 23:23:08.611451 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-59c4b45c4d5nfbs_28233917-7a5b-4379-aa43-c42633f51848/manager/0.log" Feb 02 23:23:08 crc kubenswrapper[4755]: I0202 23:23:08.734760 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-848b5fdc57-2r7s8_65cf4adb-782b-4e5e-b961-4e5f29b83b80/operator/0.log" Feb 02 23:23:09 crc kubenswrapper[4755]: I0202 23:23:09.085132 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-njmg8_fff245fb-8d48-4499-8963-1efcf0705321/registry-server/0.log" Feb 02 23:23:09 crc kubenswrapper[4755]: I0202 23:23:09.295645 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-788c46999f-5q9mw_5073a3de-5fb1-4375-9db4-a7009d6b8799/manager/0.log" Feb 02 23:23:09 crc kubenswrapper[4755]: I0202 23:23:09.344018 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5b964cf4cd-7p67q_ec4fb29e-f536-46ec-bd89-5b212f2a5d13/manager/0.log" Feb 02 23:23:09 crc kubenswrapper[4755]: I0202 23:23:09.548713 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-lpvlk_b0dd57f2-6b55-4cc3-827f-a5c2321b2ad5/operator/0.log" Feb 02 23:23:09 crc kubenswrapper[4755]: I0202 23:23:09.778886 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-68fc8c869-cv8gh_b5d1ece3-a9d2-4620-98f2-2bd2ff66184d/manager/0.log" Feb 02 23:23:09 crc kubenswrapper[4755]: I0202 23:23:09.816441 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-bc8597898-njwc6_9960033f-69b1-4b1c-9e06-aaf5e6d61559/manager/0.log" Feb 02 23:23:09 crc kubenswrapper[4755]: I0202 23:23:09.959476 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-56f8bfcd9f-6xtvs_368193dc-45fb-4dff-8c24-8c38a7fd56da/manager/0.log" Feb 02 23:23:10 crc kubenswrapper[4755]: I0202 23:23:10.154095 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5b96584f66-98jt2_fd656823-f4de-4e4d-a109-7a180552abd1/manager/0.log" Feb 02 23:23:10 crc kubenswrapper[4755]: I0202 23:23:10.195000 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-564965969-5prbd_04cd6a32-6398-4f89-b034-3a9ebf8da40b/manager/0.log" Feb 02 23:23:23 crc kubenswrapper[4755]: I0202 23:23:23.389638 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 23:23:23 crc kubenswrapper[4755]: I0202 23:23:23.390276 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 23:23:23 crc kubenswrapper[4755]: I0202 23:23:23.390341 4755 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" Feb 02 23:23:23 crc kubenswrapper[4755]: I0202 23:23:23.391412 4755 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0"} pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 23:23:23 crc kubenswrapper[4755]: I0202 23:23:23.391509 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" containerID="cri-o://034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0" gracePeriod=600 Feb 02 23:23:23 crc kubenswrapper[4755]: E0202 23:23:23.520044 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:23:24 crc kubenswrapper[4755]: I0202 23:23:24.355568 4755 generic.go:334] "Generic (PLEG): container finished" podID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerID="034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0" exitCode=0 Feb 02 23:23:24 crc kubenswrapper[4755]: I0202 23:23:24.355641 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerDied","Data":"034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0"} Feb 02 23:23:24 crc kubenswrapper[4755]: I0202 23:23:24.356078 4755 scope.go:117] "RemoveContainer" containerID="c437a98306916ec88f266a1ace0a3fe9dc9ba521de37a1609e0180aacf69fd42" Feb 02 23:23:24 crc kubenswrapper[4755]: I0202 23:23:24.357041 4755 scope.go:117] "RemoveContainer" containerID="034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0" Feb 02 23:23:24 crc kubenswrapper[4755]: E0202 23:23:24.357493 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:23:29 crc kubenswrapper[4755]: I0202 23:23:29.226121 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-h2gd9_f6b1859b-cd07-469f-be86-e691a81d5b85/control-plane-machine-set-operator/0.log" Feb 02 23:23:29 crc kubenswrapper[4755]: I0202 23:23:29.345533 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-s6v4j_c61f342b-cd14-408f-8c6e-e65cee1ebb39/kube-rbac-proxy/0.log" Feb 02 23:23:29 crc kubenswrapper[4755]: I0202 23:23:29.431934 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-s6v4j_c61f342b-cd14-408f-8c6e-e65cee1ebb39/machine-api-operator/0.log" Feb 02 23:23:38 crc kubenswrapper[4755]: I0202 23:23:38.069892 4755 scope.go:117] "RemoveContainer" containerID="034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0" Feb 02 23:23:38 crc kubenswrapper[4755]: E0202 23:23:38.071368 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:23:42 crc kubenswrapper[4755]: I0202 23:23:42.823716 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-858654f9db-pr5s4_727c74a2-1ffc-4659-8f00-95e14c4a266a/cert-manager-controller/0.log" Feb 02 23:23:42 crc kubenswrapper[4755]: I0202 23:23:42.927642 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-cf98fcc89-pv6b4_6fa294d0-f7ce-49cf-8817-241cb21c6778/cert-manager-cainjector/0.log" Feb 02 23:23:43 crc kubenswrapper[4755]: I0202 23:23:43.043328 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-687f57d79b-mz5vq_fc6bced4-adaf-4d90-b8ac-25c7e5fbe33e/cert-manager-webhook/0.log" Feb 02 23:23:49 crc kubenswrapper[4755]: I0202 23:23:49.069623 4755 scope.go:117] "RemoveContainer" containerID="034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0" Feb 02 23:23:49 crc kubenswrapper[4755]: E0202 23:23:49.070885 4755 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:23:56 crc kubenswrapper[4755]: I0202 23:23:56.914488 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-ht2qx_5e6d05f8-65b8-49af-a844-b8bac61552c2/nmstate-console-plugin/0.log" Feb 02 23:23:57 crc kubenswrapper[4755]: I0202 23:23:57.144851 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-sj7f5_64c0f9fe-4e93-4135-bd8a-88e659d417d9/nmstate-handler/0.log" Feb 02 23:23:57 crc kubenswrapper[4755]: I0202 23:23:57.207084 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-z6gz8_329545ba-b43d-4600-bc08-84159813a2e4/nmstate-metrics/0.log" Feb 02 23:23:57 crc kubenswrapper[4755]: I0202 23:23:57.211775 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-z6gz8_329545ba-b43d-4600-bc08-84159813a2e4/kube-rbac-proxy/0.log" Feb 02 23:23:57 crc kubenswrapper[4755]: I0202 23:23:57.348597 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-vsn87_930aa2ef-4ef3-4759-9906-9cbafaf06970/nmstate-operator/0.log" Feb 02 23:23:57 crc kubenswrapper[4755]: I0202 23:23:57.405002 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-xtzkv_bea73b39-f5c5-4290-a4f5-c1338552023f/nmstate-webhook/0.log" Feb 02 23:24:03 crc kubenswrapper[4755]: I0202 23:24:03.068913 4755 scope.go:117] "RemoveContainer" containerID="034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0" Feb 02 23:24:03 crc kubenswrapper[4755]: E0202 23:24:03.069894 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:24:11 crc kubenswrapper[4755]: I0202 23:24:11.168177 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-f7c7b88ff-8vtw5_290fd246-5e83-433b-8aea-9ae358ae4377/kube-rbac-proxy/0.log" Feb 02 23:24:11 crc kubenswrapper[4755]: I0202 23:24:11.198129 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-f7c7b88ff-8vtw5_290fd246-5e83-433b-8aea-9ae358ae4377/manager/0.log" Feb 02 23:24:16 crc kubenswrapper[4755]: I0202 23:24:16.068716 4755 scope.go:117] "RemoveContainer" containerID="034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0" Feb 02 23:24:16 crc kubenswrapper[4755]: E0202 23:24:16.069580 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:24:25 crc kubenswrapper[4755]: I0202 23:24:25.244900 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-tbnvx_2c08ecd8-3885-49b8-af63-b0ffee6b10ef/prometheus-operator/0.log" Feb 02 23:24:25 crc kubenswrapper[4755]: I0202 23:24:25.405530 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff_425aaf78-7478-49df-822c-f7108a7765bb/prometheus-operator-admission-webhook/0.log" Feb 02 23:24:25 crc kubenswrapper[4755]: I0202 23:24:25.458321 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh_71505c0d-899c-4c32-8563-8185908f8f5a/prometheus-operator-admission-webhook/0.log" Feb 02 23:24:25 crc kubenswrapper[4755]: I0202 23:24:25.598590 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-gs2gv_c62fe136-2618-4f3a-b6d0-b6d35df54f4c/operator/0.log" Feb 02 23:24:25 crc kubenswrapper[4755]: I0202 23:24:25.641402 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-ppqsb_5a9aeb48-ea8d-4648-a311-263475a2738c/perses-operator/0.log" Feb 02 23:24:30 crc kubenswrapper[4755]: I0202 23:24:30.069183 4755 scope.go:117] "RemoveContainer" containerID="034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0" Feb 02 23:24:30 crc kubenswrapper[4755]: E0202 23:24:30.069874 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:24:40 crc kubenswrapper[4755]: I0202 23:24:40.138585 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-v6qqk_cb411fd0-384e-434e-82c7-6d42381d016e/kube-rbac-proxy/0.log" Feb 02 23:24:40 crc kubenswrapper[4755]: I0202 23:24:40.214671 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-v6qqk_cb411fd0-384e-434e-82c7-6d42381d016e/controller/0.log" Feb 02 23:24:40 crc kubenswrapper[4755]: I0202 23:24:40.313942 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-g4qwg_e86bcc6c-4300-4fcc-8333-902799e386ad/cp-frr-files/0.log" Feb 02 23:24:40 crc kubenswrapper[4755]: I0202 23:24:40.553510 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-g4qwg_e86bcc6c-4300-4fcc-8333-902799e386ad/cp-metrics/0.log" Feb 02 23:24:40 crc kubenswrapper[4755]: I0202 23:24:40.554049 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-g4qwg_e86bcc6c-4300-4fcc-8333-902799e386ad/cp-reloader/0.log" Feb 02 23:24:40 crc kubenswrapper[4755]: I0202 23:24:40.561292 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-g4qwg_e86bcc6c-4300-4fcc-8333-902799e386ad/cp-frr-files/0.log" Feb 02 23:24:40 crc kubenswrapper[4755]: I0202 23:24:40.568073 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-g4qwg_e86bcc6c-4300-4fcc-8333-902799e386ad/cp-reloader/0.log" Feb 02 23:24:40 crc kubenswrapper[4755]: I0202 23:24:40.780182 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-g4qwg_e86bcc6c-4300-4fcc-8333-902799e386ad/cp-frr-files/0.log" Feb 02 23:24:40 crc kubenswrapper[4755]: I0202 23:24:40.799597 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-g4qwg_e86bcc6c-4300-4fcc-8333-902799e386ad/cp-metrics/0.log" Feb 02 23:24:40 crc kubenswrapper[4755]: I0202 23:24:40.812913 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-g4qwg_e86bcc6c-4300-4fcc-8333-902799e386ad/cp-metrics/0.log" Feb 02 23:24:40 crc kubenswrapper[4755]: I0202 23:24:40.833592 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-g4qwg_e86bcc6c-4300-4fcc-8333-902799e386ad/cp-reloader/0.log" Feb 02 23:24:41 crc kubenswrapper[4755]: I0202 23:24:41.018233 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-g4qwg_e86bcc6c-4300-4fcc-8333-902799e386ad/cp-metrics/0.log" Feb 02 23:24:41 crc kubenswrapper[4755]: I0202 23:24:41.018264 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-g4qwg_e86bcc6c-4300-4fcc-8333-902799e386ad/cp-frr-files/0.log" Feb 02 23:24:41 crc kubenswrapper[4755]: I0202 23:24:41.026652 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-g4qwg_e86bcc6c-4300-4fcc-8333-902799e386ad/cp-reloader/0.log" Feb 02 23:24:41 crc kubenswrapper[4755]: I0202 23:24:41.039439 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-g4qwg_e86bcc6c-4300-4fcc-8333-902799e386ad/controller/0.log" Feb 02 23:24:41 crc kubenswrapper[4755]: I0202 23:24:41.214979 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-g4qwg_e86bcc6c-4300-4fcc-8333-902799e386ad/frr-metrics/0.log" Feb 02 23:24:41 crc kubenswrapper[4755]: I0202 23:24:41.235508 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-g4qwg_e86bcc6c-4300-4fcc-8333-902799e386ad/kube-rbac-proxy/0.log" Feb 02 23:24:41 crc kubenswrapper[4755]: I0202 23:24:41.271913 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-g4qwg_e86bcc6c-4300-4fcc-8333-902799e386ad/kube-rbac-proxy-frr/0.log" Feb 02 23:24:41 crc kubenswrapper[4755]: I0202 23:24:41.440228 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-c2x4m_65b275de-548e-4eea-bb10-7f32abf4f838/frr-k8s-webhook-server/0.log" Feb 02 23:24:41 crc kubenswrapper[4755]: I0202 23:24:41.444995 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-g4qwg_e86bcc6c-4300-4fcc-8333-902799e386ad/reloader/0.log" Feb 02 23:24:41 crc kubenswrapper[4755]: I0202 23:24:41.693580 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-f4d7f4757-x7zlf_eae78a10-5c8c-4917-a954-fc548de08005/manager/0.log" Feb 02 23:24:41 crc kubenswrapper[4755]: I0202 23:24:41.929952 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-65d88c686d-p24pr_599e170d-dc41-4c07-978b-616ce79d338d/webhook-server/0.log" Feb 02 23:24:41 crc kubenswrapper[4755]: I0202 23:24:41.938826 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_speaker-9h74b_fa5b9de9-7fe3-4aed-9187-c9660d3f5e38/kube-rbac-proxy/0.log" Feb 02 23:24:42 crc kubenswrapper[4755]: I0202 23:24:42.562228 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-g4qwg_e86bcc6c-4300-4fcc-8333-902799e386ad/frr/0.log" Feb 02 23:24:42 crc kubenswrapper[4755]: I0202 23:24:42.592939 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-9h74b_fa5b9de9-7fe3-4aed-9187-c9660d3f5e38/speaker/0.log" Feb 02 23:24:43 crc kubenswrapper[4755]: I0202 23:24:43.069470 4755 scope.go:117] "RemoveContainer" containerID="034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0" Feb 02 23:24:43 crc kubenswrapper[4755]: E0202 23:24:43.069776 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:24:54 crc kubenswrapper[4755]: I0202 23:24:54.070016 4755 scope.go:117] "RemoveContainer" containerID="034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0" Feb 02 23:24:54 crc kubenswrapper[4755]: E0202 23:24:54.071128 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:24:56 crc kubenswrapper[4755]: I0202 23:24:56.886706 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv_8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b/util/0.log" Feb 02 23:24:57 crc kubenswrapper[4755]: I0202 23:24:57.104226 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv_8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b/pull/0.log" Feb 02 23:24:57 crc kubenswrapper[4755]: I0202 23:24:57.144041 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv_8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b/util/0.log" Feb 02 23:24:57 crc kubenswrapper[4755]: I0202 23:24:57.168492 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv_8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b/pull/0.log" Feb 02 23:24:57 crc kubenswrapper[4755]: I0202 23:24:57.315752 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv_8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b/pull/0.log" Feb 02 23:24:57 crc kubenswrapper[4755]: I0202 23:24:57.324611 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv_8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b/util/0.log" Feb 02 23:24:57 crc kubenswrapper[4755]: I0202 23:24:57.335039 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcch9lv_8ef6a0bb-70aa-4908-b7fe-2ea6c8e13e9b/extract/0.log" Feb 02 23:24:57 crc kubenswrapper[4755]: I0202 23:24:57.455871 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk_de93e49c-6575-481e-85e1-546d78192cc1/util/0.log" Feb 02 23:24:57 crc kubenswrapper[4755]: I0202 23:24:57.617437 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk_de93e49c-6575-481e-85e1-546d78192cc1/pull/0.log" Feb 02 23:24:57 crc kubenswrapper[4755]: I0202 23:24:57.621823 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk_de93e49c-6575-481e-85e1-546d78192cc1/pull/0.log" Feb 02 23:24:57 crc kubenswrapper[4755]: I0202 23:24:57.646419 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk_de93e49c-6575-481e-85e1-546d78192cc1/util/0.log" Feb 02 23:24:57 crc kubenswrapper[4755]: I0202 23:24:57.787640 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk_de93e49c-6575-481e-85e1-546d78192cc1/pull/0.log" Feb 02 23:24:57 crc kubenswrapper[4755]: I0202 23:24:57.793672 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk_de93e49c-6575-481e-85e1-546d78192cc1/util/0.log" Feb 02 23:24:57 crc kubenswrapper[4755]: I0202 23:24:57.820062 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_3e572a74f8b8ca2bcfe04329d4f26bd9689911be5d166a7403bd6ae773qvpnk_de93e49c-6575-481e-85e1-546d78192cc1/extract/0.log" Feb 02 23:24:57 crc kubenswrapper[4755]: I0202 23:24:57.945158 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc_f0e16a35-fd76-4a88-93c2-7011b557e703/util/0.log" Feb 02 23:24:58 crc kubenswrapper[4755]: I0202 23:24:58.090670 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc_f0e16a35-fd76-4a88-93c2-7011b557e703/util/0.log" Feb 02 23:24:58 crc kubenswrapper[4755]: I0202 23:24:58.115792 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc_f0e16a35-fd76-4a88-93c2-7011b557e703/pull/0.log" Feb 02 23:24:58 crc kubenswrapper[4755]: I0202 23:24:58.137376 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc_f0e16a35-fd76-4a88-93c2-7011b557e703/pull/0.log" Feb 02 23:24:58 crc kubenswrapper[4755]: I0202 23:24:58.318427 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc_f0e16a35-fd76-4a88-93c2-7011b557e703/util/0.log" Feb 02 23:24:58 crc kubenswrapper[4755]: I0202 23:24:58.325718 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc_f0e16a35-fd76-4a88-93c2-7011b557e703/extract/0.log" Feb 02 23:24:58 crc kubenswrapper[4755]: I0202 23:24:58.335629 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713zsrtc_f0e16a35-fd76-4a88-93c2-7011b557e703/pull/0.log" Feb 02 23:24:58 crc kubenswrapper[4755]: I0202 23:24:58.467685 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg_b889cf85-3262-4e0f-834f-5598d21c019b/util/0.log" Feb 02 23:24:58 crc kubenswrapper[4755]: I0202 23:24:58.612335 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg_b889cf85-3262-4e0f-834f-5598d21c019b/util/0.log" Feb 02 23:24:58 crc kubenswrapper[4755]: I0202 23:24:58.617289 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg_b889cf85-3262-4e0f-834f-5598d21c019b/pull/0.log" Feb 02 23:24:58 crc kubenswrapper[4755]: I0202 23:24:58.630445 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg_b889cf85-3262-4e0f-834f-5598d21c019b/pull/0.log" Feb 02 23:24:58 crc kubenswrapper[4755]: I0202 23:24:58.774949 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg_b889cf85-3262-4e0f-834f-5598d21c019b/util/0.log" Feb 02 23:24:58 crc kubenswrapper[4755]: I0202 23:24:58.775834 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg_b889cf85-3262-4e0f-834f-5598d21c019b/extract/0.log" Feb 02 23:24:58 crc kubenswrapper[4755]: I0202 23:24:58.788213 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ln5jg_b889cf85-3262-4e0f-834f-5598d21c019b/pull/0.log" Feb 02 23:24:58 crc kubenswrapper[4755]: I0202 23:24:58.933112 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-slk5m_596cd905-0e7d-41ba-a535-ffa0f90820ec/extract-utilities/0.log" Feb 02 23:24:59 crc kubenswrapper[4755]: I0202 23:24:59.116691 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-slk5m_596cd905-0e7d-41ba-a535-ffa0f90820ec/extract-utilities/0.log" Feb 02 23:24:59 crc kubenswrapper[4755]: I0202 23:24:59.151929 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-slk5m_596cd905-0e7d-41ba-a535-ffa0f90820ec/extract-content/0.log" Feb 02 23:24:59 crc kubenswrapper[4755]: I0202 23:24:59.152028 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-slk5m_596cd905-0e7d-41ba-a535-ffa0f90820ec/extract-content/0.log" Feb 02 23:24:59 crc kubenswrapper[4755]: I0202 23:24:59.322266 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-slk5m_596cd905-0e7d-41ba-a535-ffa0f90820ec/extract-utilities/0.log" Feb 02 23:24:59 crc kubenswrapper[4755]: I0202 23:24:59.324850 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-slk5m_596cd905-0e7d-41ba-a535-ffa0f90820ec/extract-content/0.log" Feb 02 23:24:59 crc kubenswrapper[4755]: I0202 23:24:59.473343 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-slk5m_596cd905-0e7d-41ba-a535-ffa0f90820ec/registry-server/0.log" Feb 02 23:24:59 crc kubenswrapper[4755]: I0202 23:24:59.522413 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-82rwp_3dbe5a97-2c71-4e7a-b295-53dab6642a1b/extract-utilities/0.log" Feb 02 23:24:59 crc kubenswrapper[4755]: I0202 23:24:59.679952 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-82rwp_3dbe5a97-2c71-4e7a-b295-53dab6642a1b/extract-content/0.log" Feb 02 23:24:59 crc kubenswrapper[4755]: I0202 23:24:59.686277 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-82rwp_3dbe5a97-2c71-4e7a-b295-53dab6642a1b/extract-utilities/0.log" Feb 02 23:24:59 crc kubenswrapper[4755]: I0202 23:24:59.720349 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-82rwp_3dbe5a97-2c71-4e7a-b295-53dab6642a1b/extract-content/0.log" Feb 02 23:24:59 crc kubenswrapper[4755]: I0202 23:24:59.824795 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-82rwp_3dbe5a97-2c71-4e7a-b295-53dab6642a1b/extract-utilities/0.log" Feb 02 23:24:59 crc kubenswrapper[4755]: I0202 23:24:59.862261 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-82rwp_3dbe5a97-2c71-4e7a-b295-53dab6642a1b/extract-content/0.log" Feb 02 23:25:00 crc kubenswrapper[4755]: I0202 23:25:00.081619 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-6spmr_b30941eb-134e-4f00-9501-e8f8f47e9822/marketplace-operator/0.log" Feb 02 23:25:00 crc kubenswrapper[4755]: I0202 23:25:00.227984 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2mpx7_9db9f5df-c29a-49c2-9130-f5066595eb43/extract-utilities/0.log" Feb 02 23:25:00 crc kubenswrapper[4755]: I0202 23:25:00.320979 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-82rwp_3dbe5a97-2c71-4e7a-b295-53dab6642a1b/registry-server/0.log" Feb 02 23:25:00 crc kubenswrapper[4755]: I0202 23:25:00.332397 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2mpx7_9db9f5df-c29a-49c2-9130-f5066595eb43/extract-content/0.log" Feb 02 23:25:00 crc kubenswrapper[4755]: I0202 23:25:00.346442 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2mpx7_9db9f5df-c29a-49c2-9130-f5066595eb43/extract-utilities/0.log" Feb 02 23:25:00 crc kubenswrapper[4755]: I0202 23:25:00.392349 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2mpx7_9db9f5df-c29a-49c2-9130-f5066595eb43/extract-content/0.log" Feb 02 23:25:00 crc kubenswrapper[4755]: I0202 23:25:00.528750 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2mpx7_9db9f5df-c29a-49c2-9130-f5066595eb43/extract-utilities/0.log" Feb 02 23:25:00 crc kubenswrapper[4755]: I0202 23:25:00.571446 4755 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-lrlh4_e855c69f-498a-4338-b0c2-d5d94116c359/extract-utilities/0.log" Feb 02 23:25:00 crc kubenswrapper[4755]: I0202 23:25:00.588847 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2mpx7_9db9f5df-c29a-49c2-9130-f5066595eb43/extract-content/0.log" Feb 02 23:25:00 crc kubenswrapper[4755]: I0202 23:25:00.639416 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2mpx7_9db9f5df-c29a-49c2-9130-f5066595eb43/registry-server/0.log" Feb 02 23:25:00 crc kubenswrapper[4755]: I0202 23:25:00.762256 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-lrlh4_e855c69f-498a-4338-b0c2-d5d94116c359/extract-utilities/0.log" Feb 02 23:25:00 crc kubenswrapper[4755]: I0202 23:25:00.776716 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-lrlh4_e855c69f-498a-4338-b0c2-d5d94116c359/extract-content/0.log" Feb 02 23:25:00 crc kubenswrapper[4755]: I0202 23:25:00.788438 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-lrlh4_e855c69f-498a-4338-b0c2-d5d94116c359/extract-content/0.log" Feb 02 23:25:00 crc kubenswrapper[4755]: I0202 23:25:00.923804 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-lrlh4_e855c69f-498a-4338-b0c2-d5d94116c359/extract-utilities/0.log" Feb 02 23:25:00 crc kubenswrapper[4755]: I0202 23:25:00.940194 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-lrlh4_e855c69f-498a-4338-b0c2-d5d94116c359/extract-content/0.log" Feb 02 23:25:01 crc kubenswrapper[4755]: I0202 23:25:01.433050 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-lrlh4_e855c69f-498a-4338-b0c2-d5d94116c359/registry-server/0.log" Feb 02 23:25:06 crc kubenswrapper[4755]: I0202 23:25:06.069993 4755 scope.go:117] "RemoveContainer" containerID="034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0" Feb 02 23:25:06 crc kubenswrapper[4755]: E0202 23:25:06.070845 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:25:14 crc kubenswrapper[4755]: I0202 23:25:14.217038 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-54fdcc7f46-g2xff_425aaf78-7478-49df-822c-f7108a7765bb/prometheus-operator-admission-webhook/0.log" Feb 02 23:25:14 crc kubenswrapper[4755]: I0202 23:25:14.242699 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-54fdcc7f46-zpkwh_71505c0d-899c-4c32-8563-8185908f8f5a/prometheus-operator-admission-webhook/0.log" Feb 02 23:25:14 crc kubenswrapper[4755]: I0202 23:25:14.255366 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-tbnvx_2c08ecd8-3885-49b8-af63-b0ffee6b10ef/prometheus-operator/0.log" Feb 02 23:25:14 crc kubenswrapper[4755]: I0202 23:25:14.393460 
4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-ppqsb_5a9aeb48-ea8d-4648-a311-263475a2738c/perses-operator/0.log" Feb 02 23:25:14 crc kubenswrapper[4755]: I0202 23:25:14.425451 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-gs2gv_c62fe136-2618-4f3a-b6d0-b6d35df54f4c/operator/0.log" Feb 02 23:25:18 crc kubenswrapper[4755]: I0202 23:25:18.069699 4755 scope.go:117] "RemoveContainer" containerID="034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0" Feb 02 23:25:18 crc kubenswrapper[4755]: E0202 23:25:18.072186 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:25:27 crc kubenswrapper[4755]: I0202 23:25:27.293888 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-f7c7b88ff-8vtw5_290fd246-5e83-433b-8aea-9ae358ae4377/kube-rbac-proxy/0.log" Feb 02 23:25:27 crc kubenswrapper[4755]: I0202 23:25:27.331375 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-f7c7b88ff-8vtw5_290fd246-5e83-433b-8aea-9ae358ae4377/manager/0.log" Feb 02 23:25:29 crc kubenswrapper[4755]: I0202 23:25:29.078534 4755 scope.go:117] "RemoveContainer" containerID="034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0" Feb 02 23:25:29 crc kubenswrapper[4755]: E0202 23:25:29.081936 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:25:35 crc kubenswrapper[4755]: I0202 23:25:35.876883 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hrs8g"] Feb 02 23:25:35 crc kubenswrapper[4755]: E0202 23:25:35.877755 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c061ddea-3f10-4e51-8da7-2c4ebc556887" containerName="container-00" Feb 02 23:25:35 crc kubenswrapper[4755]: I0202 23:25:35.877767 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="c061ddea-3f10-4e51-8da7-2c4ebc556887" containerName="container-00" Feb 02 23:25:35 crc kubenswrapper[4755]: E0202 23:25:35.877775 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31e5d7ec-069a-4def-b27c-8c8418f98ce4" containerName="registry-server" Feb 02 23:25:35 crc kubenswrapper[4755]: I0202 23:25:35.877780 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="31e5d7ec-069a-4def-b27c-8c8418f98ce4" containerName="registry-server" Feb 02 23:25:35 crc kubenswrapper[4755]: E0202 23:25:35.877790 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31e5d7ec-069a-4def-b27c-8c8418f98ce4" containerName="extract-utilities" Feb 02 23:25:35 crc kubenswrapper[4755]: I0202 23:25:35.877797 4755 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="31e5d7ec-069a-4def-b27c-8c8418f98ce4" containerName="extract-utilities" Feb 02 23:25:35 crc kubenswrapper[4755]: E0202 23:25:35.877823 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31e5d7ec-069a-4def-b27c-8c8418f98ce4" containerName="extract-content" Feb 02 23:25:35 crc kubenswrapper[4755]: I0202 23:25:35.877829 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="31e5d7ec-069a-4def-b27c-8c8418f98ce4" containerName="extract-content" Feb 02 23:25:35 crc kubenswrapper[4755]: I0202 23:25:35.878014 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="c061ddea-3f10-4e51-8da7-2c4ebc556887" containerName="container-00" Feb 02 23:25:35 crc kubenswrapper[4755]: I0202 23:25:35.878036 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="31e5d7ec-069a-4def-b27c-8c8418f98ce4" containerName="registry-server" Feb 02 23:25:35 crc kubenswrapper[4755]: I0202 23:25:35.879433 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hrs8g" Feb 02 23:25:35 crc kubenswrapper[4755]: I0202 23:25:35.900336 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hrs8g"] Feb 02 23:25:36 crc kubenswrapper[4755]: I0202 23:25:36.041266 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2037e0d8-8de2-4e8f-b475-53b12144cbab-catalog-content\") pod \"community-operators-hrs8g\" (UID: \"2037e0d8-8de2-4e8f-b475-53b12144cbab\") " pod="openshift-marketplace/community-operators-hrs8g" Feb 02 23:25:36 crc kubenswrapper[4755]: I0202 23:25:36.041350 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2037e0d8-8de2-4e8f-b475-53b12144cbab-utilities\") pod \"community-operators-hrs8g\" (UID: \"2037e0d8-8de2-4e8f-b475-53b12144cbab\") " pod="openshift-marketplace/community-operators-hrs8g" Feb 02 23:25:36 crc kubenswrapper[4755]: I0202 23:25:36.041599 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7m8l2\" (UniqueName: \"kubernetes.io/projected/2037e0d8-8de2-4e8f-b475-53b12144cbab-kube-api-access-7m8l2\") pod \"community-operators-hrs8g\" (UID: \"2037e0d8-8de2-4e8f-b475-53b12144cbab\") " pod="openshift-marketplace/community-operators-hrs8g" Feb 02 23:25:36 crc kubenswrapper[4755]: I0202 23:25:36.144323 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7m8l2\" (UniqueName: \"kubernetes.io/projected/2037e0d8-8de2-4e8f-b475-53b12144cbab-kube-api-access-7m8l2\") pod \"community-operators-hrs8g\" (UID: \"2037e0d8-8de2-4e8f-b475-53b12144cbab\") " pod="openshift-marketplace/community-operators-hrs8g" Feb 02 23:25:36 crc kubenswrapper[4755]: I0202 23:25:36.144538 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2037e0d8-8de2-4e8f-b475-53b12144cbab-catalog-content\") pod \"community-operators-hrs8g\" (UID: \"2037e0d8-8de2-4e8f-b475-53b12144cbab\") " pod="openshift-marketplace/community-operators-hrs8g" Feb 02 23:25:36 crc kubenswrapper[4755]: I0202 23:25:36.144589 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/2037e0d8-8de2-4e8f-b475-53b12144cbab-utilities\") pod \"community-operators-hrs8g\" (UID: \"2037e0d8-8de2-4e8f-b475-53b12144cbab\") " pod="openshift-marketplace/community-operators-hrs8g" Feb 02 23:25:36 crc kubenswrapper[4755]: I0202 23:25:36.145294 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2037e0d8-8de2-4e8f-b475-53b12144cbab-catalog-content\") pod \"community-operators-hrs8g\" (UID: \"2037e0d8-8de2-4e8f-b475-53b12144cbab\") " pod="openshift-marketplace/community-operators-hrs8g" Feb 02 23:25:36 crc kubenswrapper[4755]: I0202 23:25:36.145377 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2037e0d8-8de2-4e8f-b475-53b12144cbab-utilities\") pod \"community-operators-hrs8g\" (UID: \"2037e0d8-8de2-4e8f-b475-53b12144cbab\") " pod="openshift-marketplace/community-operators-hrs8g" Feb 02 23:25:36 crc kubenswrapper[4755]: I0202 23:25:36.173230 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7m8l2\" (UniqueName: \"kubernetes.io/projected/2037e0d8-8de2-4e8f-b475-53b12144cbab-kube-api-access-7m8l2\") pod \"community-operators-hrs8g\" (UID: \"2037e0d8-8de2-4e8f-b475-53b12144cbab\") " pod="openshift-marketplace/community-operators-hrs8g" Feb 02 23:25:36 crc kubenswrapper[4755]: I0202 23:25:36.202587 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hrs8g" Feb 02 23:25:36 crc kubenswrapper[4755]: I0202 23:25:36.813773 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hrs8g"] Feb 02 23:25:36 crc kubenswrapper[4755]: I0202 23:25:36.913546 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hrs8g" event={"ID":"2037e0d8-8de2-4e8f-b475-53b12144cbab","Type":"ContainerStarted","Data":"098c1596c7e9456f1a3a768a1d8068fa3e7c65bae2b15af79f0344d627259fde"} Feb 02 23:25:37 crc kubenswrapper[4755]: I0202 23:25:37.922266 4755 generic.go:334] "Generic (PLEG): container finished" podID="2037e0d8-8de2-4e8f-b475-53b12144cbab" containerID="96ac220a67dddef0aa70e80588ec04cffb57dfdf04e96cd70c31316ef9398577" exitCode=0 Feb 02 23:25:37 crc kubenswrapper[4755]: I0202 23:25:37.922366 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hrs8g" event={"ID":"2037e0d8-8de2-4e8f-b475-53b12144cbab","Type":"ContainerDied","Data":"96ac220a67dddef0aa70e80588ec04cffb57dfdf04e96cd70c31316ef9398577"} Feb 02 23:25:37 crc kubenswrapper[4755]: I0202 23:25:37.923935 4755 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 23:25:38 crc kubenswrapper[4755]: I0202 23:25:38.931331 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hrs8g" event={"ID":"2037e0d8-8de2-4e8f-b475-53b12144cbab","Type":"ContainerStarted","Data":"fdcce82a5c304ead89a8f92d34928e0c8838432c10623988acd46dceaa5395fe"} Feb 02 23:25:40 crc kubenswrapper[4755]: I0202 23:25:40.948806 4755 generic.go:334] "Generic (PLEG): container finished" podID="2037e0d8-8de2-4e8f-b475-53b12144cbab" containerID="fdcce82a5c304ead89a8f92d34928e0c8838432c10623988acd46dceaa5395fe" exitCode=0 Feb 02 23:25:40 crc kubenswrapper[4755]: I0202 23:25:40.948858 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-hrs8g" event={"ID":"2037e0d8-8de2-4e8f-b475-53b12144cbab","Type":"ContainerDied","Data":"fdcce82a5c304ead89a8f92d34928e0c8838432c10623988acd46dceaa5395fe"} Feb 02 23:25:41 crc kubenswrapper[4755]: I0202 23:25:41.964742 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hrs8g" event={"ID":"2037e0d8-8de2-4e8f-b475-53b12144cbab","Type":"ContainerStarted","Data":"c0bed960333b97993d1fd5bf9373f801899ee210c71e1548d1c4276c1f16bdca"} Feb 02 23:25:41 crc kubenswrapper[4755]: I0202 23:25:41.992836 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hrs8g" podStartSLOduration=3.560643871 podStartE2EDuration="6.992813005s" podCreationTimestamp="2026-02-02 23:25:35 +0000 UTC" firstStartedPulling="2026-02-02 23:25:37.923751328 +0000 UTC m=+3093.614971654" lastFinishedPulling="2026-02-02 23:25:41.355920452 +0000 UTC m=+3097.047140788" observedRunningTime="2026-02-02 23:25:41.987904336 +0000 UTC m=+3097.679124672" watchObservedRunningTime="2026-02-02 23:25:41.992813005 +0000 UTC m=+3097.684033341" Feb 02 23:25:44 crc kubenswrapper[4755]: I0202 23:25:44.068873 4755 scope.go:117] "RemoveContainer" containerID="034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0" Feb 02 23:25:44 crc kubenswrapper[4755]: E0202 23:25:44.069367 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:25:46 crc kubenswrapper[4755]: I0202 23:25:46.203648 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hrs8g" Feb 02 23:25:46 crc kubenswrapper[4755]: I0202 23:25:46.204298 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hrs8g" Feb 02 23:25:47 crc kubenswrapper[4755]: I0202 23:25:47.271760 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-hrs8g" podUID="2037e0d8-8de2-4e8f-b475-53b12144cbab" containerName="registry-server" probeResult="failure" output=< Feb 02 23:25:47 crc kubenswrapper[4755]: timeout: failed to connect service ":50051" within 1s Feb 02 23:25:47 crc kubenswrapper[4755]: > Feb 02 23:25:56 crc kubenswrapper[4755]: I0202 23:25:56.272920 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hrs8g" Feb 02 23:25:56 crc kubenswrapper[4755]: I0202 23:25:56.366263 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hrs8g" Feb 02 23:25:56 crc kubenswrapper[4755]: I0202 23:25:56.513656 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hrs8g"] Feb 02 23:25:58 crc kubenswrapper[4755]: I0202 23:25:58.168301 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hrs8g" podUID="2037e0d8-8de2-4e8f-b475-53b12144cbab" containerName="registry-server" containerID="cri-o://c0bed960333b97993d1fd5bf9373f801899ee210c71e1548d1c4276c1f16bdca" 
Feb 02 23:25:44 crc kubenswrapper[4755]: I0202 23:25:44.068873 4755 scope.go:117] "RemoveContainer" containerID="034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0"
Feb 02 23:25:44 crc kubenswrapper[4755]: E0202 23:25:44.069367 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f"
Feb 02 23:25:46 crc kubenswrapper[4755]: I0202 23:25:46.203648 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hrs8g"
Feb 02 23:25:46 crc kubenswrapper[4755]: I0202 23:25:46.204298 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hrs8g"
Feb 02 23:25:47 crc kubenswrapper[4755]: I0202 23:25:47.271760 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-hrs8g" podUID="2037e0d8-8de2-4e8f-b475-53b12144cbab" containerName="registry-server" probeResult="failure" output=<
Feb 02 23:25:47 crc kubenswrapper[4755]: timeout: failed to connect service ":50051" within 1s
Feb 02 23:25:47 crc kubenswrapper[4755]: >
Feb 02 23:25:56 crc kubenswrapper[4755]: I0202 23:25:56.272920 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hrs8g"
Feb 02 23:25:56 crc kubenswrapper[4755]: I0202 23:25:56.366263 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hrs8g"
Feb 02 23:25:56 crc kubenswrapper[4755]: I0202 23:25:56.513656 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hrs8g"]
Feb 02 23:25:58 crc kubenswrapper[4755]: I0202 23:25:58.168301 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hrs8g" podUID="2037e0d8-8de2-4e8f-b475-53b12144cbab" containerName="registry-server" containerID="cri-o://c0bed960333b97993d1fd5bf9373f801899ee210c71e1548d1c4276c1f16bdca" gracePeriod=2
Feb 02 23:25:58 crc kubenswrapper[4755]: I0202 23:25:58.736837 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hrs8g"
Feb 02 23:25:58 crc kubenswrapper[4755]: I0202 23:25:58.879552 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7m8l2\" (UniqueName: \"kubernetes.io/projected/2037e0d8-8de2-4e8f-b475-53b12144cbab-kube-api-access-7m8l2\") pod \"2037e0d8-8de2-4e8f-b475-53b12144cbab\" (UID: \"2037e0d8-8de2-4e8f-b475-53b12144cbab\") "
Feb 02 23:25:58 crc kubenswrapper[4755]: I0202 23:25:58.879638 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2037e0d8-8de2-4e8f-b475-53b12144cbab-utilities\") pod \"2037e0d8-8de2-4e8f-b475-53b12144cbab\" (UID: \"2037e0d8-8de2-4e8f-b475-53b12144cbab\") "
Feb 02 23:25:58 crc kubenswrapper[4755]: I0202 23:25:58.879814 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2037e0d8-8de2-4e8f-b475-53b12144cbab-catalog-content\") pod \"2037e0d8-8de2-4e8f-b475-53b12144cbab\" (UID: \"2037e0d8-8de2-4e8f-b475-53b12144cbab\") "
Feb 02 23:25:58 crc kubenswrapper[4755]: I0202 23:25:58.888050 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2037e0d8-8de2-4e8f-b475-53b12144cbab-utilities" (OuterVolumeSpecName: "utilities") pod "2037e0d8-8de2-4e8f-b475-53b12144cbab" (UID: "2037e0d8-8de2-4e8f-b475-53b12144cbab"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 23:25:58 crc kubenswrapper[4755]: I0202 23:25:58.894461 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2037e0d8-8de2-4e8f-b475-53b12144cbab-kube-api-access-7m8l2" (OuterVolumeSpecName: "kube-api-access-7m8l2") pod "2037e0d8-8de2-4e8f-b475-53b12144cbab" (UID: "2037e0d8-8de2-4e8f-b475-53b12144cbab"). InnerVolumeSpecName "kube-api-access-7m8l2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 23:25:58 crc kubenswrapper[4755]: I0202 23:25:58.935089 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2037e0d8-8de2-4e8f-b475-53b12144cbab-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2037e0d8-8de2-4e8f-b475-53b12144cbab" (UID: "2037e0d8-8de2-4e8f-b475-53b12144cbab"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 23:25:58 crc kubenswrapper[4755]: I0202 23:25:58.982369 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2037e0d8-8de2-4e8f-b475-53b12144cbab-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 23:25:58 crc kubenswrapper[4755]: I0202 23:25:58.982411 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2037e0d8-8de2-4e8f-b475-53b12144cbab-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 02 23:25:58 crc kubenswrapper[4755]: I0202 23:25:58.982425 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7m8l2\" (UniqueName: \"kubernetes.io/projected/2037e0d8-8de2-4e8f-b475-53b12144cbab-kube-api-access-7m8l2\") on node \"crc\" DevicePath \"\""
Feb 02 23:25:59 crc kubenswrapper[4755]: I0202 23:25:59.069405 4755 scope.go:117] "RemoveContainer" containerID="034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0"
Feb 02 23:25:59 crc kubenswrapper[4755]: E0202 23:25:59.070144 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f"
Feb 02 23:25:59 crc kubenswrapper[4755]: I0202 23:25:59.183710 4755 generic.go:334] "Generic (PLEG): container finished" podID="2037e0d8-8de2-4e8f-b475-53b12144cbab" containerID="c0bed960333b97993d1fd5bf9373f801899ee210c71e1548d1c4276c1f16bdca" exitCode=0
Feb 02 23:25:59 crc kubenswrapper[4755]: I0202 23:25:59.183764 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hrs8g" event={"ID":"2037e0d8-8de2-4e8f-b475-53b12144cbab","Type":"ContainerDied","Data":"c0bed960333b97993d1fd5bf9373f801899ee210c71e1548d1c4276c1f16bdca"}
Feb 02 23:25:59 crc kubenswrapper[4755]: I0202 23:25:59.183813 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hrs8g" event={"ID":"2037e0d8-8de2-4e8f-b475-53b12144cbab","Type":"ContainerDied","Data":"098c1596c7e9456f1a3a768a1d8068fa3e7c65bae2b15af79f0344d627259fde"}
Feb 02 23:25:59 crc kubenswrapper[4755]: I0202 23:25:59.183830 4755 scope.go:117] "RemoveContainer" containerID="c0bed960333b97993d1fd5bf9373f801899ee210c71e1548d1c4276c1f16bdca"
Feb 02 23:25:59 crc kubenswrapper[4755]: I0202 23:25:59.183855 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hrs8g"
Feb 02 23:25:59 crc kubenswrapper[4755]: I0202 23:25:59.230202 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hrs8g"]
Feb 02 23:25:59 crc kubenswrapper[4755]: I0202 23:25:59.230907 4755 scope.go:117] "RemoveContainer" containerID="fdcce82a5c304ead89a8f92d34928e0c8838432c10623988acd46dceaa5395fe"
Feb 02 23:25:59 crc kubenswrapper[4755]: I0202 23:25:59.248630 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hrs8g"]
Feb 02 23:25:59 crc kubenswrapper[4755]: I0202 23:25:59.268301 4755 scope.go:117] "RemoveContainer" containerID="96ac220a67dddef0aa70e80588ec04cffb57dfdf04e96cd70c31316ef9398577"
Feb 02 23:25:59 crc kubenswrapper[4755]: I0202 23:25:59.346409 4755 scope.go:117] "RemoveContainer" containerID="c0bed960333b97993d1fd5bf9373f801899ee210c71e1548d1c4276c1f16bdca"
Feb 02 23:25:59 crc kubenswrapper[4755]: E0202 23:25:59.347216 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0bed960333b97993d1fd5bf9373f801899ee210c71e1548d1c4276c1f16bdca\": container with ID starting with c0bed960333b97993d1fd5bf9373f801899ee210c71e1548d1c4276c1f16bdca not found: ID does not exist" containerID="c0bed960333b97993d1fd5bf9373f801899ee210c71e1548d1c4276c1f16bdca"
Feb 02 23:25:59 crc kubenswrapper[4755]: I0202 23:25:59.347411 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0bed960333b97993d1fd5bf9373f801899ee210c71e1548d1c4276c1f16bdca"} err="failed to get container status \"c0bed960333b97993d1fd5bf9373f801899ee210c71e1548d1c4276c1f16bdca\": rpc error: code = NotFound desc = could not find container \"c0bed960333b97993d1fd5bf9373f801899ee210c71e1548d1c4276c1f16bdca\": container with ID starting with c0bed960333b97993d1fd5bf9373f801899ee210c71e1548d1c4276c1f16bdca not found: ID does not exist"
Feb 02 23:25:59 crc kubenswrapper[4755]: I0202 23:25:59.347628 4755 scope.go:117] "RemoveContainer" containerID="fdcce82a5c304ead89a8f92d34928e0c8838432c10623988acd46dceaa5395fe"
Feb 02 23:25:59 crc kubenswrapper[4755]: E0202 23:25:59.348409 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fdcce82a5c304ead89a8f92d34928e0c8838432c10623988acd46dceaa5395fe\": container with ID starting with fdcce82a5c304ead89a8f92d34928e0c8838432c10623988acd46dceaa5395fe not found: ID does not exist" containerID="fdcce82a5c304ead89a8f92d34928e0c8838432c10623988acd46dceaa5395fe"
Feb 02 23:25:59 crc kubenswrapper[4755]: I0202 23:25:59.348463 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fdcce82a5c304ead89a8f92d34928e0c8838432c10623988acd46dceaa5395fe"} err="failed to get container status \"fdcce82a5c304ead89a8f92d34928e0c8838432c10623988acd46dceaa5395fe\": rpc error: code = NotFound desc = could not find container \"fdcce82a5c304ead89a8f92d34928e0c8838432c10623988acd46dceaa5395fe\": container with ID starting with fdcce82a5c304ead89a8f92d34928e0c8838432c10623988acd46dceaa5395fe not found: ID does not exist"
Feb 02 23:25:59 crc kubenswrapper[4755]: I0202 23:25:59.348492 4755 scope.go:117] "RemoveContainer" containerID="96ac220a67dddef0aa70e80588ec04cffb57dfdf04e96cd70c31316ef9398577"
Feb 02 23:25:59 crc kubenswrapper[4755]: E0202 23:25:59.348898 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96ac220a67dddef0aa70e80588ec04cffb57dfdf04e96cd70c31316ef9398577\": container with ID starting with 96ac220a67dddef0aa70e80588ec04cffb57dfdf04e96cd70c31316ef9398577 not found: ID does not exist" containerID="96ac220a67dddef0aa70e80588ec04cffb57dfdf04e96cd70c31316ef9398577"
Feb 02 23:25:59 crc kubenswrapper[4755]: I0202 23:25:59.349095 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96ac220a67dddef0aa70e80588ec04cffb57dfdf04e96cd70c31316ef9398577"} err="failed to get container status \"96ac220a67dddef0aa70e80588ec04cffb57dfdf04e96cd70c31316ef9398577\": rpc error: code = NotFound desc = could not find container \"96ac220a67dddef0aa70e80588ec04cffb57dfdf04e96cd70c31316ef9398577\": container with ID starting with 96ac220a67dddef0aa70e80588ec04cffb57dfdf04e96cd70c31316ef9398577 not found: ID does not exist"
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:26:58 crc kubenswrapper[4755]: I0202 23:26:58.960265 4755 generic.go:334] "Generic (PLEG): container finished" podID="9759deb7-577e-46c5-b707-23b2025eec70" containerID="642ff9e100ae5667078161c140b860a57093253257053d46a8a0213fc55291fc" exitCode=0 Feb 02 23:26:58 crc kubenswrapper[4755]: I0202 23:26:58.960384 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vzkhl/must-gather-wxxgt" event={"ID":"9759deb7-577e-46c5-b707-23b2025eec70","Type":"ContainerDied","Data":"642ff9e100ae5667078161c140b860a57093253257053d46a8a0213fc55291fc"} Feb 02 23:26:58 crc kubenswrapper[4755]: I0202 23:26:58.961934 4755 scope.go:117] "RemoveContainer" containerID="642ff9e100ae5667078161c140b860a57093253257053d46a8a0213fc55291fc" Feb 02 23:26:59 crc kubenswrapper[4755]: I0202 23:26:59.943044 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-vzkhl_must-gather-wxxgt_9759deb7-577e-46c5-b707-23b2025eec70/gather/0.log" Feb 02 23:27:02 crc kubenswrapper[4755]: I0202 23:27:02.070284 4755 scope.go:117] "RemoveContainer" containerID="034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0" Feb 02 23:27:02 crc kubenswrapper[4755]: E0202 23:27:02.071708 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:27:07 crc kubenswrapper[4755]: I0202 23:27:07.340711 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-vzkhl/must-gather-wxxgt"] Feb 02 23:27:07 crc kubenswrapper[4755]: I0202 23:27:07.342031 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-vzkhl/must-gather-wxxgt" podUID="9759deb7-577e-46c5-b707-23b2025eec70" containerName="copy" containerID="cri-o://6e802f55cf295d6735244d4ab0459f2d11e3bd8f659ef37453f7532ad576d752" gracePeriod=2 Feb 02 23:27:07 crc kubenswrapper[4755]: I0202 23:27:07.353341 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-vzkhl/must-gather-wxxgt"] Feb 02 23:27:07 crc kubenswrapper[4755]: I0202 23:27:07.826085 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-vzkhl_must-gather-wxxgt_9759deb7-577e-46c5-b707-23b2025eec70/copy/0.log" Feb 02 23:27:07 crc kubenswrapper[4755]: I0202 23:27:07.826784 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vzkhl/must-gather-wxxgt" Feb 02 23:27:07 crc kubenswrapper[4755]: I0202 23:27:07.892261 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jnlz5\" (UniqueName: \"kubernetes.io/projected/9759deb7-577e-46c5-b707-23b2025eec70-kube-api-access-jnlz5\") pod \"9759deb7-577e-46c5-b707-23b2025eec70\" (UID: \"9759deb7-577e-46c5-b707-23b2025eec70\") " Feb 02 23:27:07 crc kubenswrapper[4755]: I0202 23:27:07.892350 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/9759deb7-577e-46c5-b707-23b2025eec70-must-gather-output\") pod \"9759deb7-577e-46c5-b707-23b2025eec70\" (UID: \"9759deb7-577e-46c5-b707-23b2025eec70\") " Feb 02 23:27:07 crc kubenswrapper[4755]: I0202 23:27:07.898611 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9759deb7-577e-46c5-b707-23b2025eec70-kube-api-access-jnlz5" (OuterVolumeSpecName: "kube-api-access-jnlz5") pod "9759deb7-577e-46c5-b707-23b2025eec70" (UID: "9759deb7-577e-46c5-b707-23b2025eec70"). InnerVolumeSpecName "kube-api-access-jnlz5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:27:07 crc kubenswrapper[4755]: I0202 23:27:07.996322 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jnlz5\" (UniqueName: \"kubernetes.io/projected/9759deb7-577e-46c5-b707-23b2025eec70-kube-api-access-jnlz5\") on node \"crc\" DevicePath \"\"" Feb 02 23:27:08 crc kubenswrapper[4755]: I0202 23:27:08.081914 4755 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-vzkhl_must-gather-wxxgt_9759deb7-577e-46c5-b707-23b2025eec70/copy/0.log" Feb 02 23:27:08 crc kubenswrapper[4755]: I0202 23:27:08.082435 4755 generic.go:334] "Generic (PLEG): container finished" podID="9759deb7-577e-46c5-b707-23b2025eec70" containerID="6e802f55cf295d6735244d4ab0459f2d11e3bd8f659ef37453f7532ad576d752" exitCode=143 Feb 02 23:27:08 crc kubenswrapper[4755]: I0202 23:27:08.082501 4755 scope.go:117] "RemoveContainer" containerID="6e802f55cf295d6735244d4ab0459f2d11e3bd8f659ef37453f7532ad576d752" Feb 02 23:27:08 crc kubenswrapper[4755]: I0202 23:27:08.082524 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-vzkhl/must-gather-wxxgt" Feb 02 23:27:08 crc kubenswrapper[4755]: I0202 23:27:08.087012 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9759deb7-577e-46c5-b707-23b2025eec70-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "9759deb7-577e-46c5-b707-23b2025eec70" (UID: "9759deb7-577e-46c5-b707-23b2025eec70"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 23:27:08 crc kubenswrapper[4755]: I0202 23:27:08.098518 4755 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/9759deb7-577e-46c5-b707-23b2025eec70-must-gather-output\") on node \"crc\" DevicePath \"\"" Feb 02 23:27:08 crc kubenswrapper[4755]: I0202 23:27:08.113560 4755 scope.go:117] "RemoveContainer" containerID="642ff9e100ae5667078161c140b860a57093253257053d46a8a0213fc55291fc" Feb 02 23:27:08 crc kubenswrapper[4755]: I0202 23:27:08.240385 4755 scope.go:117] "RemoveContainer" containerID="6e802f55cf295d6735244d4ab0459f2d11e3bd8f659ef37453f7532ad576d752" Feb 02 23:27:08 crc kubenswrapper[4755]: E0202 23:27:08.243184 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e802f55cf295d6735244d4ab0459f2d11e3bd8f659ef37453f7532ad576d752\": container with ID starting with 6e802f55cf295d6735244d4ab0459f2d11e3bd8f659ef37453f7532ad576d752 not found: ID does not exist" containerID="6e802f55cf295d6735244d4ab0459f2d11e3bd8f659ef37453f7532ad576d752" Feb 02 23:27:08 crc kubenswrapper[4755]: I0202 23:27:08.243231 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e802f55cf295d6735244d4ab0459f2d11e3bd8f659ef37453f7532ad576d752"} err="failed to get container status \"6e802f55cf295d6735244d4ab0459f2d11e3bd8f659ef37453f7532ad576d752\": rpc error: code = NotFound desc = could not find container \"6e802f55cf295d6735244d4ab0459f2d11e3bd8f659ef37453f7532ad576d752\": container with ID starting with 6e802f55cf295d6735244d4ab0459f2d11e3bd8f659ef37453f7532ad576d752 not found: ID does not exist" Feb 02 23:27:08 crc kubenswrapper[4755]: I0202 23:27:08.243267 4755 scope.go:117] "RemoveContainer" containerID="642ff9e100ae5667078161c140b860a57093253257053d46a8a0213fc55291fc" Feb 02 23:27:08 crc kubenswrapper[4755]: E0202 23:27:08.243756 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"642ff9e100ae5667078161c140b860a57093253257053d46a8a0213fc55291fc\": container with ID starting with 642ff9e100ae5667078161c140b860a57093253257053d46a8a0213fc55291fc not found: ID does not exist" containerID="642ff9e100ae5667078161c140b860a57093253257053d46a8a0213fc55291fc" Feb 02 23:27:08 crc kubenswrapper[4755]: I0202 23:27:08.243793 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"642ff9e100ae5667078161c140b860a57093253257053d46a8a0213fc55291fc"} err="failed to get container status \"642ff9e100ae5667078161c140b860a57093253257053d46a8a0213fc55291fc\": rpc error: code = NotFound desc = could not find container \"642ff9e100ae5667078161c140b860a57093253257053d46a8a0213fc55291fc\": container with ID starting with 642ff9e100ae5667078161c140b860a57093253257053d46a8a0213fc55291fc not found: ID does not exist" Feb 02 23:27:09 crc kubenswrapper[4755]: I0202 23:27:09.086644 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9759deb7-577e-46c5-b707-23b2025eec70" path="/var/lib/kubelet/pods/9759deb7-577e-46c5-b707-23b2025eec70/volumes" Feb 02 23:27:13 crc kubenswrapper[4755]: I0202 23:27:13.069563 4755 scope.go:117] "RemoveContainer" containerID="034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0" Feb 02 23:27:13 crc kubenswrapper[4755]: E0202 23:27:13.070567 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:27:27 crc kubenswrapper[4755]: I0202 23:27:27.070581 4755 scope.go:117] "RemoveContainer" containerID="034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0" Feb 02 23:27:27 crc kubenswrapper[4755]: E0202 23:27:27.071766 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:27:39 crc kubenswrapper[4755]: I0202 23:27:39.069479 4755 scope.go:117] "RemoveContainer" containerID="034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0" Feb 02 23:27:39 crc kubenswrapper[4755]: E0202 23:27:39.070799 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:27:53 crc kubenswrapper[4755]: I0202 23:27:53.070519 4755 scope.go:117] "RemoveContainer" containerID="034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0" Feb 02 23:27:53 crc kubenswrapper[4755]: E0202 23:27:53.071964 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:28:06 crc kubenswrapper[4755]: I0202 23:28:06.069487 4755 scope.go:117] "RemoveContainer" containerID="034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0" Feb 02 23:28:06 crc kubenswrapper[4755]: E0202 23:28:06.070649 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:28:20 crc kubenswrapper[4755]: I0202 23:28:20.069151 4755 scope.go:117] "RemoveContainer" containerID="034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0" Feb 02 23:28:20 crc kubenswrapper[4755]: E0202 23:28:20.070437 4755 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-8q4mc_openshift-machine-config-operator(bc686b0f-8473-46b8-9d5e-abcddcca635f)\"" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" Feb 02 23:28:31 crc kubenswrapper[4755]: I0202 23:28:31.068912 4755 scope.go:117] "RemoveContainer" containerID="034d7ba5d0ce5477f351374274f686db0bd5723a33e1cdacfe10c883feaf40b0" Feb 02 23:28:32 crc kubenswrapper[4755]: I0202 23:28:32.038466 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" event={"ID":"bc686b0f-8473-46b8-9d5e-abcddcca635f","Type":"ContainerStarted","Data":"e00e037f04a51f30486fb5fe8da6648278d65d60a59c42de2a42eceeb4b06774"} Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.174119 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501250-p8j64"] Feb 02 23:30:00 crc kubenswrapper[4755]: E0202 23:30:00.175153 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9759deb7-577e-46c5-b707-23b2025eec70" containerName="copy" Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.175170 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="9759deb7-577e-46c5-b707-23b2025eec70" containerName="copy" Feb 02 23:30:00 crc kubenswrapper[4755]: E0202 23:30:00.175198 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2037e0d8-8de2-4e8f-b475-53b12144cbab" containerName="registry-server" Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.175206 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2037e0d8-8de2-4e8f-b475-53b12144cbab" containerName="registry-server" Feb 02 23:30:00 crc kubenswrapper[4755]: E0202 23:30:00.175229 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2037e0d8-8de2-4e8f-b475-53b12144cbab" containerName="extract-utilities" Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.175240 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2037e0d8-8de2-4e8f-b475-53b12144cbab" containerName="extract-utilities" Feb 02 23:30:00 crc kubenswrapper[4755]: E0202 23:30:00.175257 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2037e0d8-8de2-4e8f-b475-53b12144cbab" containerName="extract-content" Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.175265 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2037e0d8-8de2-4e8f-b475-53b12144cbab" containerName="extract-content" Feb 02 23:30:00 crc kubenswrapper[4755]: E0202 23:30:00.175283 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9759deb7-577e-46c5-b707-23b2025eec70" containerName="gather" Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.175290 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="9759deb7-577e-46c5-b707-23b2025eec70" containerName="gather" Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.175593 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="9759deb7-577e-46c5-b707-23b2025eec70" containerName="copy" Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.175619 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="2037e0d8-8de2-4e8f-b475-53b12144cbab" containerName="registry-server" Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.175632 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="9759deb7-577e-46c5-b707-23b2025eec70" containerName="gather" Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.176528 4755 util.go:30] "No 
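From 23:25:44 through 23:28:20 the machine-config-daemon entries repeat the same pair: a RemoveContainer sync attempt, then "back-off 5m0s restarting failed container". The kubelet's container restart backoff doubles per failure up to a cap, and a constant "5m0s" means the cap has been reached; once the current window expires the container is restarted, which is the ContainerStarted event at 23:28:32 above. A sketch of that capped doubling (the 10s base and 5m cap are assumed constants matching upstream kubelet defaults, not values read from this log):

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	base := 10 * time.Second    // assumed kubelet base container backoff
    	maxDelay := 5 * time.Minute // the "5m0s" printed in the errors above
    	delay := base
    	for crash := 1; crash <= 7; crash++ {
    		fmt.Printf("crash %d: next restart allowed after %v\n", crash, delay)
    		delay *= 2
    		if delay > maxDelay {
    			delay = maxDelay // prints 10s, 20s, 40s, 1m20s, 2m40s, then 5m0s forever
    		}
    	}
    }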
Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.174119 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501250-p8j64"]
Feb 02 23:30:00 crc kubenswrapper[4755]: E0202 23:30:00.175153 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9759deb7-577e-46c5-b707-23b2025eec70" containerName="copy"
Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.175170 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="9759deb7-577e-46c5-b707-23b2025eec70" containerName="copy"
Feb 02 23:30:00 crc kubenswrapper[4755]: E0202 23:30:00.175198 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2037e0d8-8de2-4e8f-b475-53b12144cbab" containerName="registry-server"
Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.175206 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2037e0d8-8de2-4e8f-b475-53b12144cbab" containerName="registry-server"
Feb 02 23:30:00 crc kubenswrapper[4755]: E0202 23:30:00.175229 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2037e0d8-8de2-4e8f-b475-53b12144cbab" containerName="extract-utilities"
Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.175240 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2037e0d8-8de2-4e8f-b475-53b12144cbab" containerName="extract-utilities"
Feb 02 23:30:00 crc kubenswrapper[4755]: E0202 23:30:00.175257 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2037e0d8-8de2-4e8f-b475-53b12144cbab" containerName="extract-content"
Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.175265 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="2037e0d8-8de2-4e8f-b475-53b12144cbab" containerName="extract-content"
Feb 02 23:30:00 crc kubenswrapper[4755]: E0202 23:30:00.175283 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9759deb7-577e-46c5-b707-23b2025eec70" containerName="gather"
Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.175290 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="9759deb7-577e-46c5-b707-23b2025eec70" containerName="gather"
Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.175593 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="9759deb7-577e-46c5-b707-23b2025eec70" containerName="copy"
Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.175619 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="2037e0d8-8de2-4e8f-b475-53b12144cbab" containerName="registry-server"
Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.175632 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="9759deb7-577e-46c5-b707-23b2025eec70" containerName="gather"
Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.176528 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501250-p8j64"
Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.178927 4755 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.185456 4755 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.188900 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501250-p8j64"]
Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.310934 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmf62\" (UniqueName: \"kubernetes.io/projected/464abe61-184f-4b10-83e1-2618274cf597-kube-api-access-vmf62\") pod \"collect-profiles-29501250-p8j64\" (UID: \"464abe61-184f-4b10-83e1-2618274cf597\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501250-p8j64"
Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.310993 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/464abe61-184f-4b10-83e1-2618274cf597-config-volume\") pod \"collect-profiles-29501250-p8j64\" (UID: \"464abe61-184f-4b10-83e1-2618274cf597\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501250-p8j64"
Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.311020 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/464abe61-184f-4b10-83e1-2618274cf597-secret-volume\") pod \"collect-profiles-29501250-p8j64\" (UID: \"464abe61-184f-4b10-83e1-2618274cf597\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501250-p8j64"
Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.412693 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmf62\" (UniqueName: \"kubernetes.io/projected/464abe61-184f-4b10-83e1-2618274cf597-kube-api-access-vmf62\") pod \"collect-profiles-29501250-p8j64\" (UID: \"464abe61-184f-4b10-83e1-2618274cf597\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501250-p8j64"
Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.412796 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/464abe61-184f-4b10-83e1-2618274cf597-config-volume\") pod \"collect-profiles-29501250-p8j64\" (UID: \"464abe61-184f-4b10-83e1-2618274cf597\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501250-p8j64"
Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.412891 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/464abe61-184f-4b10-83e1-2618274cf597-secret-volume\") pod \"collect-profiles-29501250-p8j64\" (UID: \"464abe61-184f-4b10-83e1-2618274cf597\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501250-p8j64"
Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.413902 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/464abe61-184f-4b10-83e1-2618274cf597-config-volume\") pod \"collect-profiles-29501250-p8j64\" (UID: \"464abe61-184f-4b10-83e1-2618274cf597\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501250-p8j64"
Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.420692 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/464abe61-184f-4b10-83e1-2618274cf597-secret-volume\") pod \"collect-profiles-29501250-p8j64\" (UID: \"464abe61-184f-4b10-83e1-2618274cf597\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501250-p8j64"
Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.436934 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmf62\" (UniqueName: \"kubernetes.io/projected/464abe61-184f-4b10-83e1-2618274cf597-kube-api-access-vmf62\") pod \"collect-profiles-29501250-p8j64\" (UID: \"464abe61-184f-4b10-83e1-2618274cf597\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29501250-p8j64"
Feb 02 23:30:00 crc kubenswrapper[4755]: I0202 23:30:00.505313 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501250-p8j64"
Feb 02 23:30:01 crc kubenswrapper[4755]: I0202 23:30:01.013356 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501250-p8j64"]
Feb 02 23:30:01 crc kubenswrapper[4755]: I0202 23:30:01.034213 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501250-p8j64" event={"ID":"464abe61-184f-4b10-83e1-2618274cf597","Type":"ContainerStarted","Data":"8aa524ca82d2aeb772cc0753259312a75fe3025cd3838af28d0689c030574c77"}
Feb 02 23:30:02 crc kubenswrapper[4755]: I0202 23:30:02.047938 4755 generic.go:334] "Generic (PLEG): container finished" podID="464abe61-184f-4b10-83e1-2618274cf597" containerID="77b67d05d89bdfd0bec6b3d1ef6c002a9dcb7a34c4c83acc0198015479e650e3" exitCode=0
Feb 02 23:30:02 crc kubenswrapper[4755]: I0202 23:30:02.048044 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501250-p8j64" event={"ID":"464abe61-184f-4b10-83e1-2618274cf597","Type":"ContainerDied","Data":"77b67d05d89bdfd0bec6b3d1ef6c002a9dcb7a34c4c83acc0198015479e650e3"}
Feb 02 23:30:03 crc kubenswrapper[4755]: I0202 23:30:03.549873 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501250-p8j64"
Feb 02 23:30:03 crc kubenswrapper[4755]: I0202 23:30:03.688925 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vmf62\" (UniqueName: \"kubernetes.io/projected/464abe61-184f-4b10-83e1-2618274cf597-kube-api-access-vmf62\") pod \"464abe61-184f-4b10-83e1-2618274cf597\" (UID: \"464abe61-184f-4b10-83e1-2618274cf597\") "
Feb 02 23:30:03 crc kubenswrapper[4755]: I0202 23:30:03.689243 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/464abe61-184f-4b10-83e1-2618274cf597-secret-volume\") pod \"464abe61-184f-4b10-83e1-2618274cf597\" (UID: \"464abe61-184f-4b10-83e1-2618274cf597\") "
Feb 02 23:30:03 crc kubenswrapper[4755]: I0202 23:30:03.689337 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/464abe61-184f-4b10-83e1-2618274cf597-config-volume\") pod \"464abe61-184f-4b10-83e1-2618274cf597\" (UID: \"464abe61-184f-4b10-83e1-2618274cf597\") "
Feb 02 23:30:03 crc kubenswrapper[4755]: I0202 23:30:03.690763 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/464abe61-184f-4b10-83e1-2618274cf597-config-volume" (OuterVolumeSpecName: "config-volume") pod "464abe61-184f-4b10-83e1-2618274cf597" (UID: "464abe61-184f-4b10-83e1-2618274cf597"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 23:30:03 crc kubenswrapper[4755]: I0202 23:30:03.698191 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/464abe61-184f-4b10-83e1-2618274cf597-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "464abe61-184f-4b10-83e1-2618274cf597" (UID: "464abe61-184f-4b10-83e1-2618274cf597"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 23:30:03 crc kubenswrapper[4755]: I0202 23:30:03.698554 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/464abe61-184f-4b10-83e1-2618274cf597-kube-api-access-vmf62" (OuterVolumeSpecName: "kube-api-access-vmf62") pod "464abe61-184f-4b10-83e1-2618274cf597" (UID: "464abe61-184f-4b10-83e1-2618274cf597"). InnerVolumeSpecName "kube-api-access-vmf62". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 23:30:03 crc kubenswrapper[4755]: I0202 23:30:03.791354 4755 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/464abe61-184f-4b10-83e1-2618274cf597-secret-volume\") on node \"crc\" DevicePath \"\""
Feb 02 23:30:03 crc kubenswrapper[4755]: I0202 23:30:03.791587 4755 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/464abe61-184f-4b10-83e1-2618274cf597-config-volume\") on node \"crc\" DevicePath \"\""
Feb 02 23:30:03 crc kubenswrapper[4755]: I0202 23:30:03.791656 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vmf62\" (UniqueName: \"kubernetes.io/projected/464abe61-184f-4b10-83e1-2618274cf597-kube-api-access-vmf62\") on node \"crc\" DevicePath \"\""
Feb 02 23:30:04 crc kubenswrapper[4755]: I0202 23:30:04.074633 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29501250-p8j64" event={"ID":"464abe61-184f-4b10-83e1-2618274cf597","Type":"ContainerDied","Data":"8aa524ca82d2aeb772cc0753259312a75fe3025cd3838af28d0689c030574c77"}
Feb 02 23:30:04 crc kubenswrapper[4755]: I0202 23:30:04.074699 4755 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8aa524ca82d2aeb772cc0753259312a75fe3025cd3838af28d0689c030574c77"
Feb 02 23:30:04 crc kubenswrapper[4755]: I0202 23:30:04.074700 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29501250-p8j64"
Feb 02 23:30:04 crc kubenswrapper[4755]: I0202 23:30:04.635329 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"]
Feb 02 23:30:04 crc kubenswrapper[4755]: I0202 23:30:04.644334 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29501205-cr5zc"]
Feb 02 23:30:05 crc kubenswrapper[4755]: I0202 23:30:05.084324 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f623968-924c-4daa-acc5-9dcc77105d07" path="/var/lib/kubelet/pods/6f623968-924c-4daa-acc5-9dcc77105d07/volumes"
Feb 02 23:30:09 crc kubenswrapper[4755]: I0202 23:30:09.090691 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9cmc2"]
Feb 02 23:30:09 crc kubenswrapper[4755]: E0202 23:30:09.092346 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="464abe61-184f-4b10-83e1-2618274cf597" containerName="collect-profiles"
Feb 02 23:30:09 crc kubenswrapper[4755]: I0202 23:30:09.092382 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="464abe61-184f-4b10-83e1-2618274cf597" containerName="collect-profiles"
Feb 02 23:30:09 crc kubenswrapper[4755]: I0202 23:30:09.092911 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="464abe61-184f-4b10-83e1-2618274cf597" containerName="collect-profiles"
Feb 02 23:30:09 crc kubenswrapper[4755]: I0202 23:30:09.096426 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9cmc2"]
Feb 02 23:30:09 crc kubenswrapper[4755]: I0202 23:30:09.096608 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9cmc2"
Feb 02 23:30:09 crc kubenswrapper[4755]: I0202 23:30:09.228869 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4233099-9bf7-414e-ba92-b4b361c63199-catalog-content\") pod \"redhat-operators-9cmc2\" (UID: \"b4233099-9bf7-414e-ba92-b4b361c63199\") " pod="openshift-marketplace/redhat-operators-9cmc2"
Feb 02 23:30:09 crc kubenswrapper[4755]: I0202 23:30:09.228936 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z65zk\" (UniqueName: \"kubernetes.io/projected/b4233099-9bf7-414e-ba92-b4b361c63199-kube-api-access-z65zk\") pod \"redhat-operators-9cmc2\" (UID: \"b4233099-9bf7-414e-ba92-b4b361c63199\") " pod="openshift-marketplace/redhat-operators-9cmc2"
Feb 02 23:30:09 crc kubenswrapper[4755]: I0202 23:30:09.229030 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4233099-9bf7-414e-ba92-b4b361c63199-utilities\") pod \"redhat-operators-9cmc2\" (UID: \"b4233099-9bf7-414e-ba92-b4b361c63199\") " pod="openshift-marketplace/redhat-operators-9cmc2"
Feb 02 23:30:09 crc kubenswrapper[4755]: I0202 23:30:09.331837 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4233099-9bf7-414e-ba92-b4b361c63199-utilities\") pod \"redhat-operators-9cmc2\" (UID: \"b4233099-9bf7-414e-ba92-b4b361c63199\") " pod="openshift-marketplace/redhat-operators-9cmc2"
Feb 02 23:30:09 crc kubenswrapper[4755]: I0202 23:30:09.332179 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4233099-9bf7-414e-ba92-b4b361c63199-catalog-content\") pod \"redhat-operators-9cmc2\" (UID: \"b4233099-9bf7-414e-ba92-b4b361c63199\") " pod="openshift-marketplace/redhat-operators-9cmc2"
Feb 02 23:30:09 crc kubenswrapper[4755]: I0202 23:30:09.332236 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z65zk\" (UniqueName: \"kubernetes.io/projected/b4233099-9bf7-414e-ba92-b4b361c63199-kube-api-access-z65zk\") pod \"redhat-operators-9cmc2\" (UID: \"b4233099-9bf7-414e-ba92-b4b361c63199\") " pod="openshift-marketplace/redhat-operators-9cmc2"
Feb 02 23:30:09 crc kubenswrapper[4755]: I0202 23:30:09.332544 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4233099-9bf7-414e-ba92-b4b361c63199-utilities\") pod \"redhat-operators-9cmc2\" (UID: \"b4233099-9bf7-414e-ba92-b4b361c63199\") " pod="openshift-marketplace/redhat-operators-9cmc2"
Feb 02 23:30:09 crc kubenswrapper[4755]: I0202 23:30:09.332864 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4233099-9bf7-414e-ba92-b4b361c63199-catalog-content\") pod \"redhat-operators-9cmc2\" (UID: \"b4233099-9bf7-414e-ba92-b4b361c63199\") " pod="openshift-marketplace/redhat-operators-9cmc2"
Feb 02 23:30:09 crc kubenswrapper[4755]: I0202 23:30:09.355513 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z65zk\" (UniqueName: \"kubernetes.io/projected/b4233099-9bf7-414e-ba92-b4b361c63199-kube-api-access-z65zk\") pod \"redhat-operators-9cmc2\" (UID: \"b4233099-9bf7-414e-ba92-b4b361c63199\") " pod="openshift-marketplace/redhat-operators-9cmc2"
Feb 02 23:30:09 crc kubenswrapper[4755]: I0202 23:30:09.430834 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9cmc2"
Feb 02 23:30:10 crc kubenswrapper[4755]: I0202 23:30:10.447560 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9cmc2"]
Feb 02 23:30:11 crc kubenswrapper[4755]: I0202 23:30:11.199418 4755 generic.go:334] "Generic (PLEG): container finished" podID="b4233099-9bf7-414e-ba92-b4b361c63199" containerID="cf154e707567c57bed37298330872207ddb8b0bdc5a9bbf8775980ac16f6770c" exitCode=0
Feb 02 23:30:11 crc kubenswrapper[4755]: I0202 23:30:11.199469 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9cmc2" event={"ID":"b4233099-9bf7-414e-ba92-b4b361c63199","Type":"ContainerDied","Data":"cf154e707567c57bed37298330872207ddb8b0bdc5a9bbf8775980ac16f6770c"}
Feb 02 23:30:11 crc kubenswrapper[4755]: I0202 23:30:11.199885 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9cmc2" event={"ID":"b4233099-9bf7-414e-ba92-b4b361c63199","Type":"ContainerStarted","Data":"7bf0a4e74c8fe8738709486022e3ed38e3d1cec2e61b62f8b4a5ea35e17188bd"}
Feb 02 23:30:12 crc kubenswrapper[4755]: I0202 23:30:12.214571 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9cmc2" event={"ID":"b4233099-9bf7-414e-ba92-b4b361c63199","Type":"ContainerStarted","Data":"1c0aaed5db33caa30736c88122dcfcf6957d5877336fecf2d86b0614011c9755"}
Feb 02 23:30:12 crc kubenswrapper[4755]: I0202 23:30:12.457214 4755 scope.go:117] "RemoveContainer" containerID="18facd827b868c3e5eafa2e3ad15d359b5152ca0738a65615eaf7a3707e8a709"
Feb 02 23:30:16 crc kubenswrapper[4755]: I0202 23:30:16.258953 4755 generic.go:334] "Generic (PLEG): container finished" podID="b4233099-9bf7-414e-ba92-b4b361c63199" containerID="1c0aaed5db33caa30736c88122dcfcf6957d5877336fecf2d86b0614011c9755" exitCode=0
Feb 02 23:30:16 crc kubenswrapper[4755]: I0202 23:30:16.259034 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9cmc2" event={"ID":"b4233099-9bf7-414e-ba92-b4b361c63199","Type":"ContainerDied","Data":"1c0aaed5db33caa30736c88122dcfcf6957d5877336fecf2d86b0614011c9755"}
Feb 02 23:30:17 crc kubenswrapper[4755]: I0202 23:30:17.273595 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9cmc2" event={"ID":"b4233099-9bf7-414e-ba92-b4b361c63199","Type":"ContainerStarted","Data":"478e348781d84225d66dabe8306c9731e1a7c9198157e8d82630adad99027401"}
Feb 02 23:30:17 crc kubenswrapper[4755]: I0202 23:30:17.300141 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9cmc2" podStartSLOduration=2.6241886770000002 podStartE2EDuration="8.300123446s" podCreationTimestamp="2026-02-02 23:30:09 +0000 UTC" firstStartedPulling="2026-02-02 23:30:11.201486865 +0000 UTC m=+3366.892707211" lastFinishedPulling="2026-02-02 23:30:16.877421614 +0000 UTC m=+3372.568641980" observedRunningTime="2026-02-02 23:30:17.2970853 +0000 UTC m=+3372.988305636" watchObservedRunningTime="2026-02-02 23:30:17.300123446 +0000 UTC m=+3372.991343772"
Feb 02 23:30:19 crc kubenswrapper[4755]: I0202 23:30:19.431071 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9cmc2"
Feb 02 23:30:19 crc kubenswrapper[4755]: I0202 23:30:19.431434 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9cmc2"
Feb 02 23:30:20 crc kubenswrapper[4755]: I0202 23:30:20.510159 4755 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-9cmc2" podUID="b4233099-9bf7-414e-ba92-b4b361c63199" containerName="registry-server" probeResult="failure" output=<
Feb 02 23:30:20 crc kubenswrapper[4755]: timeout: failed to connect service ":50051" within 1s
Feb 02 23:30:20 crc kubenswrapper[4755]: >
Feb 02 23:30:23 crc kubenswrapper[4755]: I0202 23:30:23.085076 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bls6g"]
Feb 02 23:30:23 crc kubenswrapper[4755]: I0202 23:30:23.104250 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bls6g"
Feb 02 23:30:23 crc kubenswrapper[4755]: I0202 23:30:23.108653 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bls6g"]
Feb 02 23:30:23 crc kubenswrapper[4755]: I0202 23:30:23.170793 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ac2d64d-8326-4424-bb57-b079b848232b-catalog-content\") pod \"redhat-marketplace-bls6g\" (UID: \"4ac2d64d-8326-4424-bb57-b079b848232b\") " pod="openshift-marketplace/redhat-marketplace-bls6g"
Feb 02 23:30:23 crc kubenswrapper[4755]: I0202 23:30:23.170852 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ds6zv\" (UniqueName: \"kubernetes.io/projected/4ac2d64d-8326-4424-bb57-b079b848232b-kube-api-access-ds6zv\") pod \"redhat-marketplace-bls6g\" (UID: \"4ac2d64d-8326-4424-bb57-b079b848232b\") " pod="openshift-marketplace/redhat-marketplace-bls6g"
Feb 02 23:30:23 crc kubenswrapper[4755]: I0202 23:30:23.170990 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ac2d64d-8326-4424-bb57-b079b848232b-utilities\") pod \"redhat-marketplace-bls6g\" (UID: \"4ac2d64d-8326-4424-bb57-b079b848232b\") " pod="openshift-marketplace/redhat-marketplace-bls6g"
Feb 02 23:30:23 crc kubenswrapper[4755]: I0202 23:30:23.272502 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ac2d64d-8326-4424-bb57-b079b848232b-catalog-content\") pod \"redhat-marketplace-bls6g\" (UID: \"4ac2d64d-8326-4424-bb57-b079b848232b\") " pod="openshift-marketplace/redhat-marketplace-bls6g"
Feb 02 23:30:23 crc kubenswrapper[4755]: I0202 23:30:23.272858 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ds6zv\" (UniqueName: \"kubernetes.io/projected/4ac2d64d-8326-4424-bb57-b079b848232b-kube-api-access-ds6zv\") pod \"redhat-marketplace-bls6g\" (UID: \"4ac2d64d-8326-4424-bb57-b079b848232b\") " pod="openshift-marketplace/redhat-marketplace-bls6g"
Feb 02 23:30:23 crc kubenswrapper[4755]: I0202 23:30:23.272991 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ac2d64d-8326-4424-bb57-b079b848232b-utilities\") pod \"redhat-marketplace-bls6g\" (UID: \"4ac2d64d-8326-4424-bb57-b079b848232b\") " pod="openshift-marketplace/redhat-marketplace-bls6g"
Feb 02 23:30:23 crc kubenswrapper[4755]: I0202 23:30:23.273165 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ac2d64d-8326-4424-bb57-b079b848232b-catalog-content\") pod \"redhat-marketplace-bls6g\" (UID: \"4ac2d64d-8326-4424-bb57-b079b848232b\") " pod="openshift-marketplace/redhat-marketplace-bls6g"
Feb 02 23:30:23 crc kubenswrapper[4755]: I0202 23:30:23.273535 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ac2d64d-8326-4424-bb57-b079b848232b-utilities\") pod \"redhat-marketplace-bls6g\" (UID: \"4ac2d64d-8326-4424-bb57-b079b848232b\") " pod="openshift-marketplace/redhat-marketplace-bls6g"
Feb 02 23:30:23 crc kubenswrapper[4755]: I0202 23:30:23.300557 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ds6zv\" (UniqueName: \"kubernetes.io/projected/4ac2d64d-8326-4424-bb57-b079b848232b-kube-api-access-ds6zv\") pod \"redhat-marketplace-bls6g\" (UID: \"4ac2d64d-8326-4424-bb57-b079b848232b\") " pod="openshift-marketplace/redhat-marketplace-bls6g"
Feb 02 23:30:23 crc kubenswrapper[4755]: I0202 23:30:23.424150 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bls6g"
Feb 02 23:30:23 crc kubenswrapper[4755]: I0202 23:30:23.907094 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bls6g"]
Feb 02 23:30:24 crc kubenswrapper[4755]: I0202 23:30:24.359509 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bls6g" event={"ID":"4ac2d64d-8326-4424-bb57-b079b848232b","Type":"ContainerStarted","Data":"29a2ff1393213433659fafbf07494317601170f7c52682428efeec4c57e54389"}
Feb 02 23:30:24 crc kubenswrapper[4755]: I0202 23:30:24.359867 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bls6g" event={"ID":"4ac2d64d-8326-4424-bb57-b079b848232b","Type":"ContainerStarted","Data":"21140e5edaff73e518af552cb8e3cdc9ad92d918ad6b0512996a48d6fd904518"}
Feb 02 23:30:25 crc kubenswrapper[4755]: I0202 23:30:25.380601 4755 generic.go:334] "Generic (PLEG): container finished" podID="4ac2d64d-8326-4424-bb57-b079b848232b" containerID="29a2ff1393213433659fafbf07494317601170f7c52682428efeec4c57e54389" exitCode=0
Feb 02 23:30:25 crc kubenswrapper[4755]: I0202 23:30:25.380781 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bls6g" event={"ID":"4ac2d64d-8326-4424-bb57-b079b848232b","Type":"ContainerDied","Data":"29a2ff1393213433659fafbf07494317601170f7c52682428efeec4c57e54389"}
Feb 02 23:30:29 crc kubenswrapper[4755]: I0202 23:30:29.504669 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9cmc2"
Feb 02 23:30:29 crc kubenswrapper[4755]: I0202 23:30:29.569051 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9cmc2"
Feb 02 23:30:29 crc kubenswrapper[4755]: I0202 23:30:29.747453 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9cmc2"]
Feb 02 23:30:30 crc kubenswrapper[4755]: I0202 23:30:30.480306 4755 generic.go:334] "Generic (PLEG): container finished" podID="4ac2d64d-8326-4424-bb57-b079b848232b" containerID="5885a7477305d63f4636ac2aa963bb7575df4d9015d62f0f1c2c1db70b7a6e8b" exitCode=0
Feb 02 23:30:30 crc kubenswrapper[4755]: I0202 23:30:30.482225 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bls6g" event={"ID":"4ac2d64d-8326-4424-bb57-b079b848232b","Type":"ContainerDied","Data":"5885a7477305d63f4636ac2aa963bb7575df4d9015d62f0f1c2c1db70b7a6e8b"}
Feb 02 23:30:31 crc kubenswrapper[4755]: I0202 23:30:31.498276 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bls6g" event={"ID":"4ac2d64d-8326-4424-bb57-b079b848232b","Type":"ContainerStarted","Data":"8841c54e4a3b06b8d951b88d3204c4c62dc60214848b83c950ab873ca87df010"}
Feb 02 23:30:31 crc kubenswrapper[4755]: I0202 23:30:31.498415 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9cmc2" podUID="b4233099-9bf7-414e-ba92-b4b361c63199" containerName="registry-server" containerID="cri-o://478e348781d84225d66dabe8306c9731e1a7c9198157e8d82630adad99027401" gracePeriod=2
Feb 02 23:30:31 crc kubenswrapper[4755]: I0202 23:30:31.539615 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bls6g" podStartSLOduration=2.979791274 podStartE2EDuration="8.539591658s" podCreationTimestamp="2026-02-02 23:30:23 +0000 UTC" firstStartedPulling="2026-02-02 23:30:25.385367539 +0000 UTC m=+3381.076587905" lastFinishedPulling="2026-02-02 23:30:30.945167933 +0000 UTC m=+3386.636388289" observedRunningTime="2026-02-02 23:30:31.533808425 +0000 UTC m=+3387.225028751" watchObservedRunningTime="2026-02-02 23:30:31.539591658 +0000 UTC m=+3387.230811994"
Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.032123 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9cmc2"
Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.178833 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z65zk\" (UniqueName: \"kubernetes.io/projected/b4233099-9bf7-414e-ba92-b4b361c63199-kube-api-access-z65zk\") pod \"b4233099-9bf7-414e-ba92-b4b361c63199\" (UID: \"b4233099-9bf7-414e-ba92-b4b361c63199\") "
Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.179450 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4233099-9bf7-414e-ba92-b4b361c63199-utilities\") pod \"b4233099-9bf7-414e-ba92-b4b361c63199\" (UID: \"b4233099-9bf7-414e-ba92-b4b361c63199\") "
Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.179489 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4233099-9bf7-414e-ba92-b4b361c63199-catalog-content\") pod \"b4233099-9bf7-414e-ba92-b4b361c63199\" (UID: \"b4233099-9bf7-414e-ba92-b4b361c63199\") "
Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.180010 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4233099-9bf7-414e-ba92-b4b361c63199-utilities" (OuterVolumeSpecName: "utilities") pod "b4233099-9bf7-414e-ba92-b4b361c63199" (UID: "b4233099-9bf7-414e-ba92-b4b361c63199"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.186249 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4233099-9bf7-414e-ba92-b4b361c63199-kube-api-access-z65zk" (OuterVolumeSpecName: "kube-api-access-z65zk") pod "b4233099-9bf7-414e-ba92-b4b361c63199" (UID: "b4233099-9bf7-414e-ba92-b4b361c63199"). InnerVolumeSpecName "kube-api-access-z65zk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.279013 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4233099-9bf7-414e-ba92-b4b361c63199-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b4233099-9bf7-414e-ba92-b4b361c63199" (UID: "b4233099-9bf7-414e-ba92-b4b361c63199"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.282290 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4233099-9bf7-414e-ba92-b4b361c63199-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.282319 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4233099-9bf7-414e-ba92-b4b361c63199-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.282332 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z65zk\" (UniqueName: \"kubernetes.io/projected/b4233099-9bf7-414e-ba92-b4b361c63199-kube-api-access-z65zk\") on node \"crc\" DevicePath \"\""
Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.509302 4755 generic.go:334] "Generic (PLEG): container finished" podID="b4233099-9bf7-414e-ba92-b4b361c63199" containerID="478e348781d84225d66dabe8306c9731e1a7c9198157e8d82630adad99027401" exitCode=0
Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.509371 4755 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9cmc2"
Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.509367 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9cmc2" event={"ID":"b4233099-9bf7-414e-ba92-b4b361c63199","Type":"ContainerDied","Data":"478e348781d84225d66dabe8306c9731e1a7c9198157e8d82630adad99027401"}
Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.509428 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9cmc2" event={"ID":"b4233099-9bf7-414e-ba92-b4b361c63199","Type":"ContainerDied","Data":"7bf0a4e74c8fe8738709486022e3ed38e3d1cec2e61b62f8b4a5ea35e17188bd"}
Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.509460 4755 scope.go:117] "RemoveContainer" containerID="478e348781d84225d66dabe8306c9731e1a7c9198157e8d82630adad99027401"
Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.537242 4755 scope.go:117] "RemoveContainer" containerID="1c0aaed5db33caa30736c88122dcfcf6957d5877336fecf2d86b0614011c9755"
Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.544768 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9cmc2"]
Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.555294 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9cmc2"]
Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.563211 4755 scope.go:117] "RemoveContainer" containerID="cf154e707567c57bed37298330872207ddb8b0bdc5a9bbf8775980ac16f6770c"
Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.633175 4755 scope.go:117] "RemoveContainer" containerID="478e348781d84225d66dabe8306c9731e1a7c9198157e8d82630adad99027401"
Feb 02 23:30:32 crc kubenswrapper[4755]: E0202 23:30:32.633774 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"478e348781d84225d66dabe8306c9731e1a7c9198157e8d82630adad99027401\": container with ID starting with 478e348781d84225d66dabe8306c9731e1a7c9198157e8d82630adad99027401 not found: ID does not exist" containerID="478e348781d84225d66dabe8306c9731e1a7c9198157e8d82630adad99027401"
Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.633838 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"478e348781d84225d66dabe8306c9731e1a7c9198157e8d82630adad99027401"} err="failed to get container status \"478e348781d84225d66dabe8306c9731e1a7c9198157e8d82630adad99027401\": rpc error: code = NotFound desc = could not find container \"478e348781d84225d66dabe8306c9731e1a7c9198157e8d82630adad99027401\": container with ID starting with 478e348781d84225d66dabe8306c9731e1a7c9198157e8d82630adad99027401 not found: ID does not exist"
Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.633883 4755 scope.go:117] "RemoveContainer" containerID="1c0aaed5db33caa30736c88122dcfcf6957d5877336fecf2d86b0614011c9755"
Feb 02 23:30:32 crc kubenswrapper[4755]: E0202 23:30:32.634351 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c0aaed5db33caa30736c88122dcfcf6957d5877336fecf2d86b0614011c9755\": container with ID starting with 1c0aaed5db33caa30736c88122dcfcf6957d5877336fecf2d86b0614011c9755 not found: ID does not exist" containerID="1c0aaed5db33caa30736c88122dcfcf6957d5877336fecf2d86b0614011c9755"
Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.634393 4755
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c0aaed5db33caa30736c88122dcfcf6957d5877336fecf2d86b0614011c9755"} err="failed to get container status \"1c0aaed5db33caa30736c88122dcfcf6957d5877336fecf2d86b0614011c9755\": rpc error: code = NotFound desc = could not find container \"1c0aaed5db33caa30736c88122dcfcf6957d5877336fecf2d86b0614011c9755\": container with ID starting with 1c0aaed5db33caa30736c88122dcfcf6957d5877336fecf2d86b0614011c9755 not found: ID does not exist" Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.634419 4755 scope.go:117] "RemoveContainer" containerID="cf154e707567c57bed37298330872207ddb8b0bdc5a9bbf8775980ac16f6770c" Feb 02 23:30:32 crc kubenswrapper[4755]: E0202 23:30:32.634762 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf154e707567c57bed37298330872207ddb8b0bdc5a9bbf8775980ac16f6770c\": container with ID starting with cf154e707567c57bed37298330872207ddb8b0bdc5a9bbf8775980ac16f6770c not found: ID does not exist" containerID="cf154e707567c57bed37298330872207ddb8b0bdc5a9bbf8775980ac16f6770c" Feb 02 23:30:32 crc kubenswrapper[4755]: I0202 23:30:32.634790 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf154e707567c57bed37298330872207ddb8b0bdc5a9bbf8775980ac16f6770c"} err="failed to get container status \"cf154e707567c57bed37298330872207ddb8b0bdc5a9bbf8775980ac16f6770c\": rpc error: code = NotFound desc = could not find container \"cf154e707567c57bed37298330872207ddb8b0bdc5a9bbf8775980ac16f6770c\": container with ID starting with cf154e707567c57bed37298330872207ddb8b0bdc5a9bbf8775980ac16f6770c not found: ID does not exist" Feb 02 23:30:33 crc kubenswrapper[4755]: I0202 23:30:33.090617 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4233099-9bf7-414e-ba92-b4b361c63199" path="/var/lib/kubelet/pods/b4233099-9bf7-414e-ba92-b4b361c63199/volumes" Feb 02 23:30:33 crc kubenswrapper[4755]: I0202 23:30:33.424376 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bls6g" Feb 02 23:30:33 crc kubenswrapper[4755]: I0202 23:30:33.424670 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bls6g" Feb 02 23:30:33 crc kubenswrapper[4755]: I0202 23:30:33.482216 4755 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bls6g" Feb 02 23:30:43 crc kubenswrapper[4755]: I0202 23:30:43.493856 4755 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bls6g" Feb 02 23:30:43 crc kubenswrapper[4755]: I0202 23:30:43.623773 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bls6g"] Feb 02 23:30:43 crc kubenswrapper[4755]: I0202 23:30:43.662210 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2mpx7"] Feb 02 23:30:43 crc kubenswrapper[4755]: I0202 23:30:43.662447 4755 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2mpx7" podUID="9db9f5df-c29a-49c2-9130-f5066595eb43" containerName="registry-server" containerID="cri-o://bacdee406397bb43020a2b92cf39d275bd1f13fcec51fb3f7ed18a6943f09ae0" gracePeriod=2 Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.144819 4755 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2mpx7" Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.244799 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vz7mf\" (UniqueName: \"kubernetes.io/projected/9db9f5df-c29a-49c2-9130-f5066595eb43-kube-api-access-vz7mf\") pod \"9db9f5df-c29a-49c2-9130-f5066595eb43\" (UID: \"9db9f5df-c29a-49c2-9130-f5066595eb43\") " Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.244999 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9db9f5df-c29a-49c2-9130-f5066595eb43-catalog-content\") pod \"9db9f5df-c29a-49c2-9130-f5066595eb43\" (UID: \"9db9f5df-c29a-49c2-9130-f5066595eb43\") " Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.245039 4755 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9db9f5df-c29a-49c2-9130-f5066595eb43-utilities\") pod \"9db9f5df-c29a-49c2-9130-f5066595eb43\" (UID: \"9db9f5df-c29a-49c2-9130-f5066595eb43\") " Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.247797 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9db9f5df-c29a-49c2-9130-f5066595eb43-utilities" (OuterVolumeSpecName: "utilities") pod "9db9f5df-c29a-49c2-9130-f5066595eb43" (UID: "9db9f5df-c29a-49c2-9130-f5066595eb43"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.253861 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9db9f5df-c29a-49c2-9130-f5066595eb43-kube-api-access-vz7mf" (OuterVolumeSpecName: "kube-api-access-vz7mf") pod "9db9f5df-c29a-49c2-9130-f5066595eb43" (UID: "9db9f5df-c29a-49c2-9130-f5066595eb43"). InnerVolumeSpecName "kube-api-access-vz7mf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.276150 4755 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9db9f5df-c29a-49c2-9130-f5066595eb43-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9db9f5df-c29a-49c2-9130-f5066595eb43" (UID: "9db9f5df-c29a-49c2-9130-f5066595eb43"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.347367 4755 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9db9f5df-c29a-49c2-9130-f5066595eb43-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.347399 4755 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9db9f5df-c29a-49c2-9130-f5066595eb43-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.347410 4755 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vz7mf\" (UniqueName: \"kubernetes.io/projected/9db9f5df-c29a-49c2-9130-f5066595eb43-kube-api-access-vz7mf\") on node \"crc\" DevicePath \"\"" Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.671825 4755 generic.go:334] "Generic (PLEG): container finished" podID="9db9f5df-c29a-49c2-9130-f5066595eb43" containerID="bacdee406397bb43020a2b92cf39d275bd1f13fcec51fb3f7ed18a6943f09ae0" exitCode=0 Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.671874 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2mpx7" event={"ID":"9db9f5df-c29a-49c2-9130-f5066595eb43","Type":"ContainerDied","Data":"bacdee406397bb43020a2b92cf39d275bd1f13fcec51fb3f7ed18a6943f09ae0"} Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.671929 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2mpx7" event={"ID":"9db9f5df-c29a-49c2-9130-f5066595eb43","Type":"ContainerDied","Data":"61f397f75e7ca7fdbc22be8bf8d2e6ec7522b63d8b04322d2d468af41945cf32"} Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.671959 4755 scope.go:117] "RemoveContainer" containerID="bacdee406397bb43020a2b92cf39d275bd1f13fcec51fb3f7ed18a6943f09ae0" Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.672922 4755 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2mpx7" Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.697564 4755 scope.go:117] "RemoveContainer" containerID="9e0397a1aba5a0a0d7a4d6f69522a85c14d9b83e55d85a6a3f9483adbfbca401" Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.715786 4755 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2mpx7"] Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.724533 4755 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2mpx7"] Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.725225 4755 scope.go:117] "RemoveContainer" containerID="866f8ba1459ea753ed9951eda2b869586e37d998201ea066a5d75555bc655b77" Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.786596 4755 scope.go:117] "RemoveContainer" containerID="bacdee406397bb43020a2b92cf39d275bd1f13fcec51fb3f7ed18a6943f09ae0" Feb 02 23:30:44 crc kubenswrapper[4755]: E0202 23:30:44.787035 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bacdee406397bb43020a2b92cf39d275bd1f13fcec51fb3f7ed18a6943f09ae0\": container with ID starting with bacdee406397bb43020a2b92cf39d275bd1f13fcec51fb3f7ed18a6943f09ae0 not found: ID does not exist" containerID="bacdee406397bb43020a2b92cf39d275bd1f13fcec51fb3f7ed18a6943f09ae0" Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.787087 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bacdee406397bb43020a2b92cf39d275bd1f13fcec51fb3f7ed18a6943f09ae0"} err="failed to get container status \"bacdee406397bb43020a2b92cf39d275bd1f13fcec51fb3f7ed18a6943f09ae0\": rpc error: code = NotFound desc = could not find container \"bacdee406397bb43020a2b92cf39d275bd1f13fcec51fb3f7ed18a6943f09ae0\": container with ID starting with bacdee406397bb43020a2b92cf39d275bd1f13fcec51fb3f7ed18a6943f09ae0 not found: ID does not exist" Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.787120 4755 scope.go:117] "RemoveContainer" containerID="9e0397a1aba5a0a0d7a4d6f69522a85c14d9b83e55d85a6a3f9483adbfbca401" Feb 02 23:30:44 crc kubenswrapper[4755]: E0202 23:30:44.787360 4755 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e0397a1aba5a0a0d7a4d6f69522a85c14d9b83e55d85a6a3f9483adbfbca401\": container with ID starting with 9e0397a1aba5a0a0d7a4d6f69522a85c14d9b83e55d85a6a3f9483adbfbca401 not found: ID does not exist" containerID="9e0397a1aba5a0a0d7a4d6f69522a85c14d9b83e55d85a6a3f9483adbfbca401" Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.787385 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e0397a1aba5a0a0d7a4d6f69522a85c14d9b83e55d85a6a3f9483adbfbca401"} err="failed to get container status \"9e0397a1aba5a0a0d7a4d6f69522a85c14d9b83e55d85a6a3f9483adbfbca401\": rpc error: code = NotFound desc = could not find container \"9e0397a1aba5a0a0d7a4d6f69522a85c14d9b83e55d85a6a3f9483adbfbca401\": container with ID starting with 9e0397a1aba5a0a0d7a4d6f69522a85c14d9b83e55d85a6a3f9483adbfbca401 not found: ID does not exist" Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.787401 4755 scope.go:117] "RemoveContainer" containerID="866f8ba1459ea753ed9951eda2b869586e37d998201ea066a5d75555bc655b77" Feb 02 23:30:44 crc kubenswrapper[4755]: E0202 23:30:44.787549 4755 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"866f8ba1459ea753ed9951eda2b869586e37d998201ea066a5d75555bc655b77\": container with ID starting with 866f8ba1459ea753ed9951eda2b869586e37d998201ea066a5d75555bc655b77 not found: ID does not exist" containerID="866f8ba1459ea753ed9951eda2b869586e37d998201ea066a5d75555bc655b77" Feb 02 23:30:44 crc kubenswrapper[4755]: I0202 23:30:44.787568 4755 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"866f8ba1459ea753ed9951eda2b869586e37d998201ea066a5d75555bc655b77"} err="failed to get container status \"866f8ba1459ea753ed9951eda2b869586e37d998201ea066a5d75555bc655b77\": rpc error: code = NotFound desc = could not find container \"866f8ba1459ea753ed9951eda2b869586e37d998201ea066a5d75555bc655b77\": container with ID starting with 866f8ba1459ea753ed9951eda2b869586e37d998201ea066a5d75555bc655b77 not found: ID does not exist" Feb 02 23:30:45 crc kubenswrapper[4755]: I0202 23:30:45.079764 4755 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9db9f5df-c29a-49c2-9130-f5066595eb43" path="/var/lib/kubelet/pods/9db9f5df-c29a-49c2-9130-f5066595eb43/volumes" Feb 02 23:30:53 crc kubenswrapper[4755]: I0202 23:30:53.389164 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 23:30:53 crc kubenswrapper[4755]: I0202 23:30:53.389703 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 23:31:20 crc kubenswrapper[4755]: I0202 23:31:20.746416 4755 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-kk4hf"] Feb 02 23:31:20 crc kubenswrapper[4755]: E0202 23:31:20.747482 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4233099-9bf7-414e-ba92-b4b361c63199" containerName="extract-utilities" Feb 02 23:31:20 crc kubenswrapper[4755]: I0202 23:31:20.747496 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4233099-9bf7-414e-ba92-b4b361c63199" containerName="extract-utilities" Feb 02 23:31:20 crc kubenswrapper[4755]: E0202 23:31:20.747512 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9db9f5df-c29a-49c2-9130-f5066595eb43" containerName="registry-server" Feb 02 23:31:20 crc kubenswrapper[4755]: I0202 23:31:20.747518 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="9db9f5df-c29a-49c2-9130-f5066595eb43" containerName="registry-server" Feb 02 23:31:20 crc kubenswrapper[4755]: E0202 23:31:20.747529 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4233099-9bf7-414e-ba92-b4b361c63199" containerName="registry-server" Feb 02 23:31:20 crc kubenswrapper[4755]: I0202 23:31:20.747536 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4233099-9bf7-414e-ba92-b4b361c63199" containerName="registry-server" Feb 02 23:31:20 crc kubenswrapper[4755]: E0202 23:31:20.747557 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9db9f5df-c29a-49c2-9130-f5066595eb43" containerName="extract-content" Feb 02 23:31:20 crc kubenswrapper[4755]: I0202 
23:31:20.747562 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="9db9f5df-c29a-49c2-9130-f5066595eb43" containerName="extract-content" Feb 02 23:31:20 crc kubenswrapper[4755]: E0202 23:31:20.747574 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9db9f5df-c29a-49c2-9130-f5066595eb43" containerName="extract-utilities" Feb 02 23:31:20 crc kubenswrapper[4755]: I0202 23:31:20.747579 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="9db9f5df-c29a-49c2-9130-f5066595eb43" containerName="extract-utilities" Feb 02 23:31:20 crc kubenswrapper[4755]: E0202 23:31:20.747587 4755 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4233099-9bf7-414e-ba92-b4b361c63199" containerName="extract-content" Feb 02 23:31:20 crc kubenswrapper[4755]: I0202 23:31:20.747593 4755 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4233099-9bf7-414e-ba92-b4b361c63199" containerName="extract-content" Feb 02 23:31:20 crc kubenswrapper[4755]: I0202 23:31:20.747789 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="9db9f5df-c29a-49c2-9130-f5066595eb43" containerName="registry-server" Feb 02 23:31:20 crc kubenswrapper[4755]: I0202 23:31:20.747813 4755 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4233099-9bf7-414e-ba92-b4b361c63199" containerName="registry-server" Feb 02 23:31:20 crc kubenswrapper[4755]: I0202 23:31:20.749376 4755 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kk4hf" Feb 02 23:31:20 crc kubenswrapper[4755]: I0202 23:31:20.760841 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kk4hf"] Feb 02 23:31:20 crc kubenswrapper[4755]: I0202 23:31:20.811255 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-952qx\" (UniqueName: \"kubernetes.io/projected/ab9560bd-ea5d-4938-86f6-194645168b9b-kube-api-access-952qx\") pod \"certified-operators-kk4hf\" (UID: \"ab9560bd-ea5d-4938-86f6-194645168b9b\") " pod="openshift-marketplace/certified-operators-kk4hf" Feb 02 23:31:20 crc kubenswrapper[4755]: I0202 23:31:20.811333 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab9560bd-ea5d-4938-86f6-194645168b9b-catalog-content\") pod \"certified-operators-kk4hf\" (UID: \"ab9560bd-ea5d-4938-86f6-194645168b9b\") " pod="openshift-marketplace/certified-operators-kk4hf" Feb 02 23:31:20 crc kubenswrapper[4755]: I0202 23:31:20.811555 4755 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab9560bd-ea5d-4938-86f6-194645168b9b-utilities\") pod \"certified-operators-kk4hf\" (UID: \"ab9560bd-ea5d-4938-86f6-194645168b9b\") " pod="openshift-marketplace/certified-operators-kk4hf" Feb 02 23:31:20 crc kubenswrapper[4755]: I0202 23:31:20.913717 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab9560bd-ea5d-4938-86f6-194645168b9b-utilities\") pod \"certified-operators-kk4hf\" (UID: \"ab9560bd-ea5d-4938-86f6-194645168b9b\") " pod="openshift-marketplace/certified-operators-kk4hf" Feb 02 23:31:20 crc kubenswrapper[4755]: I0202 23:31:20.913919 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-952qx\" (UniqueName: 
\"kubernetes.io/projected/ab9560bd-ea5d-4938-86f6-194645168b9b-kube-api-access-952qx\") pod \"certified-operators-kk4hf\" (UID: \"ab9560bd-ea5d-4938-86f6-194645168b9b\") " pod="openshift-marketplace/certified-operators-kk4hf" Feb 02 23:31:20 crc kubenswrapper[4755]: I0202 23:31:20.913954 4755 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab9560bd-ea5d-4938-86f6-194645168b9b-catalog-content\") pod \"certified-operators-kk4hf\" (UID: \"ab9560bd-ea5d-4938-86f6-194645168b9b\") " pod="openshift-marketplace/certified-operators-kk4hf" Feb 02 23:31:20 crc kubenswrapper[4755]: I0202 23:31:20.914435 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab9560bd-ea5d-4938-86f6-194645168b9b-utilities\") pod \"certified-operators-kk4hf\" (UID: \"ab9560bd-ea5d-4938-86f6-194645168b9b\") " pod="openshift-marketplace/certified-operators-kk4hf" Feb 02 23:31:20 crc kubenswrapper[4755]: I0202 23:31:20.914451 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab9560bd-ea5d-4938-86f6-194645168b9b-catalog-content\") pod \"certified-operators-kk4hf\" (UID: \"ab9560bd-ea5d-4938-86f6-194645168b9b\") " pod="openshift-marketplace/certified-operators-kk4hf" Feb 02 23:31:20 crc kubenswrapper[4755]: I0202 23:31:20.934495 4755 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-952qx\" (UniqueName: \"kubernetes.io/projected/ab9560bd-ea5d-4938-86f6-194645168b9b-kube-api-access-952qx\") pod \"certified-operators-kk4hf\" (UID: \"ab9560bd-ea5d-4938-86f6-194645168b9b\") " pod="openshift-marketplace/certified-operators-kk4hf" Feb 02 23:31:21 crc kubenswrapper[4755]: I0202 23:31:21.066331 4755 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kk4hf" Feb 02 23:31:21 crc kubenswrapper[4755]: I0202 23:31:21.537595 4755 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kk4hf"] Feb 02 23:31:22 crc kubenswrapper[4755]: I0202 23:31:22.111870 4755 generic.go:334] "Generic (PLEG): container finished" podID="ab9560bd-ea5d-4938-86f6-194645168b9b" containerID="0735c01e7f0a2f57ed76839da6885e1e9c851bd78d262c3dd7e6594f6a13d285" exitCode=0 Feb 02 23:31:22 crc kubenswrapper[4755]: I0202 23:31:22.111958 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kk4hf" event={"ID":"ab9560bd-ea5d-4938-86f6-194645168b9b","Type":"ContainerDied","Data":"0735c01e7f0a2f57ed76839da6885e1e9c851bd78d262c3dd7e6594f6a13d285"} Feb 02 23:31:22 crc kubenswrapper[4755]: I0202 23:31:22.112285 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kk4hf" event={"ID":"ab9560bd-ea5d-4938-86f6-194645168b9b","Type":"ContainerStarted","Data":"e6a4c9c49edd9b56c82cb72d997ae0adcdf6d4342602b82397e41e78c7f7135a"} Feb 02 23:31:22 crc kubenswrapper[4755]: I0202 23:31:22.114894 4755 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 23:31:23 crc kubenswrapper[4755]: I0202 23:31:23.125888 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kk4hf" event={"ID":"ab9560bd-ea5d-4938-86f6-194645168b9b","Type":"ContainerStarted","Data":"eeec16ca92ed94d258ad142efbf2248209356d6e9da678f50de7456dedeb1324"} Feb 02 23:31:23 crc kubenswrapper[4755]: I0202 23:31:23.389581 4755 patch_prober.go:28] interesting pod/machine-config-daemon-8q4mc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 23:31:23 crc kubenswrapper[4755]: I0202 23:31:23.389649 4755 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-8q4mc" podUID="bc686b0f-8473-46b8-9d5e-abcddcca635f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 23:31:25 crc kubenswrapper[4755]: I0202 23:31:25.156030 4755 generic.go:334] "Generic (PLEG): container finished" podID="ab9560bd-ea5d-4938-86f6-194645168b9b" containerID="eeec16ca92ed94d258ad142efbf2248209356d6e9da678f50de7456dedeb1324" exitCode=0 Feb 02 23:31:25 crc kubenswrapper[4755]: I0202 23:31:25.156134 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kk4hf" event={"ID":"ab9560bd-ea5d-4938-86f6-194645168b9b","Type":"ContainerDied","Data":"eeec16ca92ed94d258ad142efbf2248209356d6e9da678f50de7456dedeb1324"} Feb 02 23:31:26 crc kubenswrapper[4755]: I0202 23:31:26.168176 4755 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kk4hf" event={"ID":"ab9560bd-ea5d-4938-86f6-194645168b9b","Type":"ContainerStarted","Data":"9cbfc1c0afafe9ff1ea7e6662451655909b59091032f333bf31341b18df64dbe"} Feb 02 23:31:26 crc kubenswrapper[4755]: I0202 23:31:26.197429 4755 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-kk4hf" podStartSLOduration=2.423425882 podStartE2EDuration="6.197402377s" 
podCreationTimestamp="2026-02-02 23:31:20 +0000 UTC" firstStartedPulling="2026-02-02 23:31:22.114467358 +0000 UTC m=+3437.805687694" lastFinishedPulling="2026-02-02 23:31:25.888443863 +0000 UTC m=+3441.579664189" observedRunningTime="2026-02-02 23:31:26.190202694 +0000 UTC m=+3441.881423030" watchObservedRunningTime="2026-02-02 23:31:26.197402377 +0000 UTC m=+3441.888622713" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515140231733024444 0ustar coreroot  Om77'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015140231734017362 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015140222631016501 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015140222632015452 5ustar corecore